repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
google/closure-compiler
34,550
test/com/google/javascript/jscomp/FlowSensitiveInlineVariablesTest.java
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.javascript.rhino.Node; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit tests for {@link FlowSensitiveInlineVariables}. */ @RunWith(JUnit4.class) public final class FlowSensitiveInlineVariablesTest extends CompilerTestCase { public static final String EXTERN_FUNCTIONS = """ var print; var alert; /** @nosideeffects */ function noSFX() {} function hasSFX() {} """; @Override public void setUp() throws Exception { super.setUp(); enableNormalize(); // TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary. enableNormalizeExpectedOutput(); } @Override protected int getNumRepetitions() { // Test repeatedly inline. 
return 3; } @Override protected CompilerPass getProcessor(final Compiler compiler) { return new CompilerPass() { @Override public void process(Node externs, Node root) { new PureFunctionIdentifier.Driver(compiler).process(externs, root); new FlowSensitiveInlineVariables(compiler).process(externs, root); } }; } @Test public void testSimpleAssign() { inline("var x; x = 1; print(x)", "var x; print(1)"); inline("var x; x = 1; x", "var x; 1"); inline("var x; x = 1; var a = x", "var x; var a = 1"); inline("var x; x = 1; x = x + 1", "var x; x = 1 + 1"); } @Test public void testSimpleVar() { inline("var x = 1; print(x)", "var x; print(1)"); inline("var x = 1; x", "var x; 1"); inline("var x = 1; var a = x", "var x; var a = 1"); inline("var x = 1; x = x + 1", "var x; x = 1 + 1"); } @Test public void testSimpleLet() { inline("let x = 1; print(x)", "let x; print(1)"); inline("let x = 1; x", "let x; 1"); inline("let x = 1; let a = x", "let x; let a = 1"); inline("let x = 1; x = x + 1", "let x; x = 1 + 1"); } @Test public void testSimpleConst() { inline("const x = 1; print(x)", "const x = undefined; print(1)"); inline("const x = 1; x", "const x = undefined; 1"); inline("const x = 1; const a = x", "const x = undefined; const a = 1"); } @Test public void testSimpleForIn() { inline( "var a,b,x = a in b; x", // "var a,b,x; a in b; "); noInline("var a, b; var x = a in b; print(1); x"); noInline("var a,b,x = a in b; delete a[b]; x"); } @Test public void testExported() { noInline("var _x = 1; print(_x)"); } @Test public void testDoNotInlineIncrement() { noInline("var x = 1; x++;"); noInline("var x = 1; x--;"); } @Test public void testMultiUse() { noInline("var x; x = 1; print(x); print (x);"); } @Test public void testMultiUseInSameCfgNode() { noInline("var x; x = 1; print(x) || print (x);"); } @Test public void testMultiUseInTwoDifferentPath() { noInline("var x = 1; if (print) { print(x) } else { alert(x) }"); } @Test public void testAssignmentBeforeDefinition() { inline( "x = 1; var x 
= 0; print(x)", // "x = 1; var x ; print(0)"); } @Test public void testVarInConditionPath() { noInline("if (foo) { var x = 0 } print(x)"); } @Test public void testMultiDefinitionsBeforeUse() { inline("var x = 0; x = 1; print(x)", "var x = 0; print(1)"); } @Test public void testMultiDefinitionsInSameCfgNode() { noInline("var x; (x = 1) || (x = 2); print(x)"); noInline("var x; x = (1 || (x = 2)); print(x)"); noInline("var x;(x = 1) && (x = 2); print(x)"); noInline("var x;x = (1 && (x = 2)); print(x)"); noInline("var x; x = 1 , x = 2; print(x)"); } @Test public void testNotReachingDefinitions() { noInline("var x; if (foo) { x = 0 } print (x)"); } @Test public void testNoInlineLoopCarriedDefinition() { // First print is undefined instead. noInline("var x; while(true) { print(x); x = 1; }"); // Prints 0 1 1 1 1.... noInline("var x = 0; while(true) { print(x); x = 1; }"); } @Test public void testDoNotExitLoop() { noInline("while (z) { var x = 3; } var y = x;"); } @Test public void testDoNotInlineWithinLoop() { noInline("var y = noSFX(); do { var z = y.foo(); } while (true);"); } @Test public void testDoNotInlineCatchExpression1() { noInline( """ var a; try { throw Error(""); }catch(err) { a = err; } return a.stack """); } @Test public void testDoNotInlineCatchExpression1a() { noInline( """ var a; try { throw Error(""); } catch(err) { a = err + 1; } return a.stack """); } @Test public void testDoNotInlineCatchExpression2() { noInline( """ var a; try { if (x) {throw Error("");} } catch(err) { a = err; } return a.stack """); } @Test public void testDoNotInlineCatchExpression3() { noInline( """ var a; try { throw Error(""); } catch(err) { err = x; a = err; } return a.stack """); } @Test public void testDoNotInlineCatchExpression4() { // Note: it is valid to inline "x" here but we currently don't. 
noInline( """ try { stuff(); } catch (e) { x = e; print(x); } """); } @Test public void testDefinitionAfterUse() { inline("var x = 0; print(x); x = 1", "var x; print(0); x = 1"); } @Test public void testInlineSameVariableInStraightLine() { inline( "var x; x = 1; print(x); x = 2; print(x)", // "var x; print(1); print(2)"); } @Test public void testInlineInDifferentPaths() { inline( "var x; if (print) {x = 1; print(x)} else {x = 2; print(x)}", "var x; if (print) { print(1)} else { print(2)}"); } @Test public void testNoInlineInMergedPath() { noInline("var x,y;x = 1;while(y) { if(y){ print(x) } else { x = 1 } } print(x)"); } @Test public void testInlineIntoExpressions() { inline("var x = 1; print(x + 1);", "var x; print(1 + 1)"); } @Test public void testInlineExpressions1() { inline("var a, b; var x = a+b; print(x)", "var a, b; var x; print(a+b)"); } @Test public void testInlineExpressions2() { // We can't inline because of the redefinition of "a". noInline("var a, b; var x = a + b; a = 1; print(x)"); } @Test public void testInlineExpressions3() { inline( "var a,b,x; x=a+b; x=a-b; print( x)", // "var a,b,x; x=a+b; print(a-b)"); } @Test public void testInlineExpressions4() { // Precision is lost due to comma's. noInline("var a,b,x; x=a+b, x=a-b; print(x)"); } @Test public void testInlineExpressions5() { noInline("var a; var x = a = 1; print(x)"); } @Test public void testInlineExpressions6() { noInline("var a, x; a = 1 + (x = 1); print(x)"); } @Test public void testInlineExpression7() { // Possible side effects in foo() that might conflict with bar(); noInline("var x = foo() + 1; bar(); print(x)"); // This is a possible case but we don't have analysis to prove this yet. // TODO(user): It is possible to cover this case with the same algorithm // as the missing return check. noInline("var x = foo() + 1; print(x)"); } @Test public void testInlineExpression8() { // The same variable inlined twice. 
inline( "var a,b; var x = a + b; print( x); x = a - b; print( x)", "var a,b; var x ; print(a + b); print(a - b)"); } @Test public void testInlineExpression9() { // Check for actual control flow sensitivity. inline( "var a,b; var x; if (g) { x= a + b; print( x)} x = a - b; print( x)", "var a,b; var x; if (g) { print(a + b)} print(a - b)"); } @Test public void testInlineExpression10() { // The DFA is not fine grain enough for this. noInline("var x, y; x = ((y = 1), print(y))"); } @Test public void testInlineExpressions11() { inline("var x; x = x + 1; print(x)", "var x; print(x + 1)"); noInline("var x; x = x + 1; print(x); print(x)"); } @Test public void testInlineExpressions12() { // ++ is an assignment and considered to modify state so it will not be // inlined. noInline("var x = 10; x = c++; print(x)"); } @Test public void testInlineExpressions13() { inline( """ var a = 1, b = 2; var x = a; var y = b; var z = x + y; var i = z; var j = z + y; var k = i; """, """ var a, b; var x; var y = 2; var z = 1 + y; var i; var j = z + y; var k = z; """); } @Test public void testInlineExpressions14() { inline("var a = function() {}; var b = a;", "var a; var b = function() {}"); } @Test public void testNoInlineIfDefinitionMayNotReach() { noInline("var x; if (x=1) {} x;"); } @Test public void testNoInlineEscapedToInnerFunction() { noInline("var x = 1; function foo() { x = 2 }; print(x)"); } @Test public void testNoInlineLValue() { noInline("var x; if (x = 1) { print(x) }"); } @Test public void testSwitchCase() { inline("var x = 1; switch(x) { }", "var x; switch(1) { }"); } @Test public void testShadowedVariableInnerFunction() { inline( "var x = 1; print(x) || (function() { var x; x = 1; print(x)})()", "var x ; print(1) || (function() { var x; print(1)})()"); } @Test public void testCatch() { noInline("var x = 0; try { } catch (x) { }"); noInline("try { } catch (x) { print(x) }"); } @Test public void testNoInlineGetProp1() { // We don't know if j aliases a.b noInline("var x = 
a.b.c; j.c = 1; print(x);"); } @Test public void testNoInlineGetProp2() { noInline("var x = 1 * a.b.c; j.c = 1; print(x);"); } @Test public void testNoInlineGetProp3() { // Anything inside a function is fine. inline( "var a = {b: {}}; var x = function(){1 * a.b.c}; print(x);", "var a = {b: {}}; var x; print(function(){1 * a.b.c});"); } @Test public void testNoInlineGetElem() { // Again we don't know if i = j noInline("var x = a[i]; a[j] = 2; print(x); "); } // TODO(user): These should be inlinable. @Test public void testNoInlineConstructors() { noInline("var x = new Iterator(); x.next();"); } // TODO(user): These should be inlinable. @Test public void testNoInlineArrayLits() { noInline("var x = []; print(x)"); } // TODO(user): These should be inlinable. @Test public void testNoInlineObjectLits() { noInline("var x = {}; print(x)"); } // TODO(user): These should be inlinable after the REGEX checks. @Test public void testNoInlineRegExpLits() { noInline("var x = /y/; print(x)"); } @Test public void testInlineConstructorCallsIntoLoop() { // Don't inline construction into loops. noInline( """ var x = new Iterator(); for(i = 0; i < 10; i++) { j = x.next(); } """); } @Test public void testRemoveWithLabels() { inline( "var x = 1; L: x = 2; print(x)", // "var x = 1; L: { } print(2)"); inline( "var x = 1; L: M: x = 2; print(x)", // "var x = 1; L: M: { } print(2)"); inline( "var x = 1; L: M: N: x = 2; print(x)", // "var x = 1; L: M: N: { } print(2)"); } @Test public void testInlineAcrossSideEffect1() { // This can't be inlined because print() has side-effects and might change // the definition of noSFX. // // noSFX must be both const and pure in order to inline it. noInline("var y; var x = noSFX(y); print(x)"); // inline("var y; var x = noSFX(y); print(x)", "var y;var x;print(noSFX(y))"); } @Test public void testInlineAcrossSideEffect2() { // Think noSFX() as a function that reads y.foo and return it // and SFX() write some new value of y.foo. 
If that's the case, // inlining across hasSFX() is not valid. // This is a case where hasSFX is right of the source of the inlining. noInline("var y; var x = noSFX(y), z = hasSFX(y); print(x)"); noInline("var y; var x = noSFX(y), z = new hasSFX(y); print(x)"); noInline("var y; var x = new noSFX(y), z = new hasSFX(y); print(x)"); } @Test public void testInlineAcrossSideEffect3() { // This is a case where hasSFX is left of the destination of the inlining. noInline("var y; var x = noSFX(y); hasSFX(y), print(x)"); noInline("var y; var x = noSFX(y); new hasSFX(y), print(x)"); noInline("var y; var x = new noSFX(y); new hasSFX(y), print(x)"); } @Test public void testInlineAcrossSideEffect4() { // This is a case where hasSFX is some control flow path between the // source and its destination. noInline("var y; var x = noSFX(y); hasSFX(y); print(x)"); noInline("var y; var x = noSFX(y); new hasSFX(y); print(x)"); noInline("var y; var x = new noSFX(y); new hasSFX(y); print(x)"); } @Test public void testCanInlineAcrossNoSideEffect() { // This can't be inlined because print() has side-effects and might change // the definition of noSFX. We should be able to mark noSFX as const // in some way. noInline( """ var y; var x = noSFX(y), z = noSFX(); noSFX(); noSFX(), print(x) """); // inline( // "var y; var x = noSFX(y), z = noSFX(); noSFX(); noSFX(), print(x)", // "var y; var x, z = noSFX(); noSFX(); noSFX(), print(noSFX(y))"); } @Test public void testDependOnOuterScopeVariables() { noInline("var x; function foo() { var y = x; x = 0; print(y) }"); noInline("var x; function foo() { var y = x; x++; print(y) }"); // Sadly, we don't understand the data flow of outer scoped variables as // it can be modified by code outside of this scope. We can't inline // at all if the definition has dependence on such variable. 
noInline("var x; function foo() { var y = x; print(y) }"); } @Test public void testInlineIfNameIsLeftSideOfAssign() { inline( "var x = 1; x = print(x) + 1", // "var x ; x = print(1) + 1"); inline( "var x = 1; L: x = x + 2", // "var x ; L: x = 1 + 2"); inline( "var x = 1; x = (x = x + 1)", // "var x ; x = (x = 1 + 1)"); inline( """ // Create a block scope within the function { const C1 = 1; const C2 = 2; // `var` gives `x` a larger scope than `C1` var x = C1; x = x == C1 ? C1 * 2 : C2 * 2; } // `x` still exists here console.log(x); """, """ { const C1 = 1; const C2 = undefined; // C2 was inlined var x = C1; x = x == C1 ? C1 * 2 : 2 * 2; } // Inlining `C1` to replace `x` here would not work, since `C1` is out of scope here. console.log(x); """); inline( """ // Create a block scope within the function { const C1 = 1; const C2 = 2; // `let` gives `x` the same scope as `C1` let x = C1; x = x == C1 ? C1 * 2 : C2 * 2; } """, """ { const C1 = 1; const C2 = undefined; // C2 was inlined let x; // x was inlined x = C1 == C1 ? 
C1 * 2 : 2 * 2; } """); noInline("var x = 1; x = (x = (x = 10) + x)"); noInline("var x = 1; x = (f(x) + (x = 10) + x);"); noInline("var x = 1; x=-1,foo(x)"); noInline("var x = 1; x-=1,foo(x)"); } @Test public void testInlineArguments() { testSame("function _func(x) { print(x) }"); testSame("function _func(x,y) { if(y) { x = 1 }; print(x) }"); test( "function f(x, y) { x = 1; print(x) }", // "function f(x, y) { print(1) }"); test( "function f(x, y) { if (y) { x = 1; print(x) }}", "function f(x, y) { if (y) { print(1) }}"); } @Test public void testInvalidInlineArguments1() { testSame("function f(x, y) { x = 1; arguments[0] = 2; print(x) }"); testSame( """ function f(x, y) { x = 1; var z = arguments; z[0] = 2; z[1] = 3; print(x); } """); testSame("function g(a){a[0]=2} function f(x){x=1;g(arguments);print(x)}"); } @Test public void testInvalidInlineArguments2() { testSame( """ function f(c) { var f = c; arguments[0] = this; f.apply(this, arguments); return this; } """); } @Test public void testForIn() { noInline("var x; var y = {}; for(x in y){}"); noInline("var x; var y = {}; var z; for(x in z = y){print(z)}"); noInline("var x; var y = {}; var z; for(x in y){print(z)}"); } @Test public void testForInDestructuring() { noInline("var x = 1, y = [], z; for ({z = x} in y) {}"); noInline("var x = 1, y = [], z; for ([z = x] in y) {}"); noInline("var x = 1, y = [], z; print(x); for ({z = x} in y) {}"); noInline("var x = 1, y = [], z; print(x); for ([z = x] in y) {}"); noInline("var x = 1, y = [], z; print(x); for (let {z = x} in y) {}"); noInline("var x = 1, y = [], z; print(x); for (const {z = x} in y) {}"); noInline("var x = 1; if (true) { x = 3; } var y = [[0]], z = x; for ([x] in y) {}; alert(z);"); } @Test public void testNotOkToSkipCheckPathBetweenNodes() { noInline("var x; for(x = 1; foo(x);) {}"); noInline("var x; for(; x = 1;foo(x)) {}"); } @Test public void testIssue698() { // Most of the flow algorithms operate on Vars. 
We want to make // sure the algorithm bails out appropriately if it sees // a var that it doesn't know about. inline( """ var x = ''; unknown.length < 2 && (unknown='0' + unknown); x = x + unknown; unknown.length < 3 && (unknown='0' + unknown); x = x + unknown; return x; """, """ var x; unknown.length < 2 && (unknown='0' + unknown); x = '' + unknown; unknown.length < 3 && (unknown='0' + unknown); x = x + unknown; return x; """); } @Test public void testIssue777() { test( """ function f(cmd, ta) { var temp = cmd; var temp2 = temp >> 2; cmd = STACKTOP; for (var src = temp2, dest = cmd >> 2, stop = src + 37; src < stop; src++, dest++) { HEAP32[dest] = HEAP32[src]; } temp = ta; temp2 = temp >> 2; ta = STACKTOP; STACKTOP += 8; HEAP32[ta >> 2] = HEAP32[temp2]; HEAP32[ta + 4 >> 2] = HEAP32[temp2 + 1]; } """, """ function f(cmd, ta){ var temp; var temp2 = cmd >> 2; cmd = STACKTOP; var src = temp2; var dest = cmd >> 2; var stop = src + 37; for(;src<stop;src++,dest++)HEAP32[dest]=HEAP32[src]; temp2 = ta >> 2; ta = STACKTOP; STACKTOP += 8; HEAP32[ta>>2] = HEAP32[temp2]; HEAP32[ta+4>>2] = HEAP32[temp2+1]; } """); } @Test public void testTransitiveDependencies1() { test( "function f(x) { var a = x; var b = a; x = 3; return b; }", "function f(x) { var a; var b = x; x = 3; return b; }"); } @Test public void testTransitiveDependencies2() { test( "function f(x) { var a = x; var b = a; var c = b; x = 3; return c; }", "function f(x) { var a ; var b = x; var c ; x = 3; return b; }"); } @Test public void testIssue794a() { noInline( """ var x = 1; try { x += someFunction(); } catch (e) {} x += 1; try { x += someFunction(); } catch (e) {} return x; """); } @Test public void testIssue794b() { noInline( """ var x = 1; try { x = x + someFunction(); } catch (e) {} x = x + 1; try { x = x + someFunction(); } catch (e) {} return x; """); } @Test public void testVarAssignInsideHookIssue965() { noInline("var i = 0; return 1 ? (i = 5) : 0, i;"); noInline("var i = 0; return (1 ? (i = 5) : 0) ? 
i : 0;"); noInline("var i = 0; return (1 ? (i = 5) : 0) || i;"); noInline("var i = 0; return (1 ? (i = 5) : 0) * i;"); } // GitHub issue #250: https://github.com/google/closure-compiler/issues/250 @Test public void testInlineStringConcat() { test( """ function f() { var x = ''; x = x + '1'; x = x + '2'; x = x + '3'; x = x + '4'; x = x + '5'; x = x + '6'; x = x + '7'; return x; } """, "function f() { var x; return '' + '1' + '2' + '3' + '4' + '5' + '6' + '7'; }"); } @Test public void testInlineInArrowFunctions() { test( "() => {var v; v = 1; return v;} ", // "() => {var v ; return 1;}"); test( "(v) => {v = 1; return v;}", // "(v) => { return 1;}"); } @Test public void testInlineInClassMemberFunctions() { test( """ class C { func() { var x; x = 1; return x; } } """, """ class C { func() { var x; return 1; } } """); } @Test public void testInlineLet() { inline( "let a = 1; print(a + 1)", // "let a; print(1 + 1)"); inline( "let a; a = 1; print(a + 1)", // "let a; print(1 + 1)"); noInline("let a = noSFX(); print(a)"); } @Test public void testInlineConst() { inline( "const a = 1 ; print(a + 1)", // "const a = undefined; print(1 + 1)"); inline( "const a = 1; const b = a; print(b + 1)", "const a = undefined; const b = undefined; print(1 + 1)"); noInline("const a = noSFX(); print(a)"); } @Test public void testSpecific() { inline( "let a = 1; print(a + 1)", // "let a ; print(1 + 1)"); } @Test public void testBlockScoping() { inline( """ let a = 1 print(a + 1); { let b = 2; print(b + 1); } """, """ let a; print(1 + 1); { let b; print(2 + 1); } """); inline( """ let a = 1 { let a = 2; print(a + 1); } print(a + 1); """, """ let a = 1 { let a; print(2 + 1); } print(a + 1); """); inline( """ let a = 1; {let b;} print(a) """, """ let a; {let b;} print(1) """); // This test fails to inline due to CheckPathsBetweenNodes analysis in the canInline function // in FlowSensitiveInlineVariables. 
noInline( """ let a = 1; { let b; f(b); } return(a) """); } @Test public void testBlockScoping_shouldntInline() { noInline( """ var JSCompiler_inline_result; { let a = 1; if (3 < 4) { a = 2; } JSCompiler_inline_result = a; } alert(JSCompiler_inline_result); """); // test let/const shadowing of a var noInline( """ var JSCompiler_inline_result; var a = 0; { let a = 1; if (3 < 4) { a = 2; } JSCompiler_inline_result = a; } alert(JSCompiler_inline_result); """); noInline("{ let value = 1; var g = () => value; } return g;"); } @Test public void testInlineInGenerators() { test( """ function* f() { var x = 1; return x + 1; } """, """ function* f() { var x; return 1 + 1; } """); } @Test public void testNoInlineForOf() { noInline("for (var x of n){} "); noInline("var x = 1; var n = {}; for(x of n) {}"); } @Test public void testForOfDestructuring() { noInline("var x = 1, y = [], z; for ({z = x} of y) {}"); noInline("var x = 1, y = [], z; for ([z = x] of y) {}"); noInline("var x = 1, y = [], z; print(x); for ({z = x} of y) {}"); noInline("var x = 1, y = [], z; print(x); for ([z = x] of y) {}"); noInline("var x = 1, y = [], z; print(x); for (let [z = x] of y) {}"); noInline("var x = 1, y = [], z; print(x); for (const [z = x] of y) {}"); noInline("var x = 1; if (true) { x = 3; } var y = [[0]], z = x; for ([x] of y) {}; alert(z);"); } @Test public void testTemplateStrings() { inline( "var name = 'Foo'; `Hello ${ name}`", // "var name ; `Hello ${'Foo'}`"); inline( "var name = 'Foo'; var foo = name; `Hello ${ foo }`", // "var name ; var foo ; `Hello ${'Foo'}`"); inline( "var age = 3; `Age: ${age}`", // "var age ; `Age: ${ 3}`"); } @Test public void testArrayDestructuring() { noInline("var [a, b, c] = [1, 2, 3]; print(a + b + c);"); noInline("var arr = [1, 2, 3, 4]; var [a, b, ,d] = arr;"); noInline("var x = 3; [x] = 4; print(x);"); inline("var [x] = []; x = 3; print(x);", "var [x] = []; print(3);"); } @Test public void testObjectDestructuring() { noInline("var {a, b} = {a: 3, b: 
4}; print(a + b);"); noInline("var obj = {a: 3, b: 4}; var {a, b} = obj;"); } @Test public void testDontInlineOverChangingRvalue_destructuring() { noInline("var x = 1; if (true) { x = 2; } var y = x; var [z = (x = 3, 4)] = []; print(y);"); noInline("var x = 1; if (true) { x = 2; } var y = x; [x] = []; print(y);"); noInline("var x = 1; if (true) { x = 2; } var y = x; ({x} = {}); print(y);"); noInline("var x = 1; if (true) { x = 2; } var y = x; var [z] = [x = 3]; print(y);"); } @Test public void testDestructuringDefaultValue() { inline("var x = 1; var [y = x] = [];", "var x; var [y = 1] = [];"); inline("var x = 1; var {y = x} = {};", "var x; var {y = 1} = {};"); inline("var x = 1; var {[3]: y = x} = {};", "var x; var {[3]: y = 1} = {};"); noInline("var x = 1; var {[x]: y = x} = {};"); noInline("var x = 1; var [y = x] = []; print(x);"); // don't inline because x is only conditionally reassigned to 2. noInline("var x = 1; var [y = (x = 2, 4)] = []; print(x);"); noInline("var x = 1; print(x); var [y = (x = 2, 4)] = [0]; print(x);"); // x = 2 is executed before reading x in the default value. 
inline( "var x = 1; print(x); var obj = {}; [obj[x = 2] = x] = [];", "var x ; print(1); var obj = {}; [obj[x = 2] = x] = [];"); // [x] is evaluated before obj[x = 2] is executed noInline("var x = 1; print(x); var obj = {}; [[obj[x = 2]] = [x]] = [];"); noInline("var x = 1; alert(x); ({x = x * 2} = {});"); noInline("var x = 1; alert(x); [x = x * 2] = [];"); } @Test public void testDestructuringComputedProperty() { inline("var x = 1; var {[x]: y} = {};", "var x; var {[1]: y} = {};"); noInline("var x = 1; var {[x]: y} = {}; print(x);"); noInline("var x = 1; alert(x); ({[x]: x} = {}); alert(x);"); inline("var x = 1; var y = x; ({[y]: x} = {});", "var x; var y; ({[1]: x} = {});"); } @Test public void testDeadAssignments() { inline( "let a = 3; if (3 < 4) { a = 8; } else { print(a); }", "let a; if (3 < 4) { a = 8 } else { print(3); }"); inline( "let a = 3; if (3 < 4) { [a] = 8; } else { print(a); }", "let a; if (3 < 4) { [a] = 8 } else { print(3); }"); } @Test public void testDestructuringEvaluationOrder() { // Should not inline "x = 2" in these cases because x is changed beforehand noInline("var x = 2; var {x, y = x} = {x: 3};"); noInline("var x = 2; var {y = (x = 3), z = x} = {};"); // These examples are safe to inline, but FlowSensitiveInlineVariables never inlines variables // used twice in the same CFG node even when safe to do so. 
noInline("var x = 2; var {a: y = (x = 3)} = {a: x};"); noInline("var x = 1; var {a = x} = {a: (x = 2, 3)};"); noInline("var x = 2; var {a: x = 3} = {a: x};"); noInline("var x = 1; print(x); var {a: x} = {a: x};"); noInline("var x = 1; print(x); ({a: x} = {a: x});"); noInline("var x = 1; print(x); var y; [y = x, x] = [];"); inline( "var x = 1; print(x); var y; [x, y = x] = [2];", "var x ; print(1); var y; [x, y = x] = [2];"); } @Test public void testDestructuringWithSideEffects() { noInline("function f() { x++; } var x = 2; var {y = x} = {key: f()}"); noInline("function f() { x++; } var x = 2; var y = x; var {z = y} = {a: f()}"); noInline("function f() { x++; } var x = 2; var y = x; var {a = f(), b = y} = {}"); noInline("function f() { x++; } var x = 2; var y = x; var {[f()]: z = y} = {}"); noInline("function f() { x++; } var x = 2; var y = x; var {a: {b: z = y} = f()} = {};"); noInline("function f() { x++; } var x = 2; var y; var {z = (y = x, 3)} = {a: f()}; print(y);"); inline( "function f() { x++; } var x = 2; var y = x; var {z = f()} = {a: y}", "function f() { x++; } var x = 2; var y ; var {z = f()} = {a: x}"); inline( "function f() { x++; } var x = 2; var y = x; var {a = y, b = f()} = {}", "function f() { x++; } var x = 2; var y ; var {a = x, b = f()} = {}"); inline( "function f() { x++; } var x = 2; var y = x; var {[y]: z = f()} = {}", "function f() { x++; } var x = 2; var y ; var {[x]: z = f()} = {}"); inline( "function f() { x++; } var x = 2; var y = x; var {a: {b: z = f()} = y} = {};", "function f() { x++; } var x = 2; var y ; var {a: {b: z = f()} = x} = {};"); } @Test public void testGithubIssue2818() { noInline("var x = 1; var y = x; print(x++, y);"); noInline("var x = 1; var y = x; print(x = x + 3, y);"); noInline("var x = 1; var y = x; print(({x} = {x: x * 2}), y); print(x);"); } @Test public void testNoInlineOnOptionalGetProp() { // b/360959953 - github issue 4187 // `const t2 = y?.left` should not get inlined into `node.right = t2` (to be // 
`node.right = y?.left`), because the `y?.left` value needs to be stored here // in t2, before it is rewritten by `y.left = node`. testSame( """ function swap(node) { const y = node.right; const t2 = y?.left; y.left = node; node.right = t2; return node; } """); } @Test public void testNoInlineOnOptionalGetElem() { // b/360959953 - github issue 4187 testSame( """ function swap(node) { const y = node.right; const t2 = y?.['foo']; y['foo'] = node; node.right = t2; return node; } """); } @Test public void testNoInlineOnAwait() { test("async function f() {var x = 1; print(x) }", "async function f() { var x; print(1) }"); testSame("async function f() {var x = await 1; print(x) }"); } @Test public void testNoInlineOnYeild() { test("function *f() {var x = 1; print(x) }", "function *f() { var x; print(1) }"); testSame("function *f() {var x = yield 1; print(x) }"); } @Test public void testNoInlineOnClass() { testSame( """ function f() { const x = class {}; const y = x; } """); } @Test public void testNoInlineOnTaggedTemplate() { testSame( """ function f() { var f = (a)=>{}; const x = f`tagged`; const y = x; } """); } @Test public void testInlineOnOptionalCall() { inline("let x = 1; const y = print(x)", "let x; const y = print(1)"); inline("let x = 1; const y = print?.(x)", "let x; const y = print?.(1)"); } @Test public void testOkayToInlineWithSideEffects() { inline( "var x = 1; var y = x; var z = 1; print(z++, y);", "var x ; var y ; var z = 1; print(z++, 1);"); inline( "var x = 1; var y = x; var z = 1; print([z] = [], y);", "var x ; var y ; var z = 1; print([z] = [], 1);"); inline("var x = 1; var y = x; print(x = 3, y);", "var x; var y; print(x = 3, 1);"); inline( "var x = 1; if (true) { x = 2; } var y = x; var z; z = x = y + 1;", "var x = 1; if (true) { x = 2; } var y ; var z; z = x = x + 1;"); } private void noInline(String input) { inline(input, input); } private void inline(String input, String expected) { test( externs(EXTERN_FUNCTIONS), srcs( """ function _func() { 
INPUT } """ .replace("INPUT", input)), expected( """ function _func() { EXPECTED } """ .replace("EXPECTED", expected))); } }
apache/flink-ml
35,702
flink-ml-lib/src/test/java/org/apache/flink/ml/feature/UnivariateFeatureSelectorTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.ml.feature; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.ml.feature.univariatefeatureselector.UnivariateFeatureSelector; import org.apache.flink.ml.feature.univariatefeatureselector.UnivariateFeatureSelectorModel; import org.apache.flink.ml.linalg.Vector; import org.apache.flink.ml.linalg.Vectors; import org.apache.flink.ml.linalg.typeinfo.VectorTypeInfo; import org.apache.flink.ml.util.TestUtils; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.internal.TableImpl; import org.apache.flink.test.util.AbstractTestBase; import org.apache.flink.types.Row; import org.apache.flink.util.CloseableIterator; import org.apache.commons.collections.IteratorUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.List; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; /** Tests {@link UnivariateFeatureSelector} and {@link UnivariateFeatureSelectorModel}. */ public class UnivariateFeatureSelectorTest extends AbstractTestBase { @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); private StreamExecutionEnvironment env; private StreamTableEnvironment tEnv; private Table inputChiSqTable; private Table inputANOVATable; private Table inputFValueTable; private static final double EPS = 1.0e-5; private UnivariateFeatureSelector selectorWithChiSqTest; private UnivariateFeatureSelector selectorWithANOVATest; private UnivariateFeatureSelector selectorWithFValueTest; private static final List<Row> INPUT_CHISQ_DATA = Arrays.asList( Row.of(0.0, Vectors.dense(6.0, 7.0, 0.0, 7.0, 6.0, 0.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 6.0, 0.0, 5.0, 9.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 3.0, 0.0, 5.0, 5.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 8.0, 5.0, 6.0, 4.0).toSparse()), Row.of(2.0, Vectors.dense(8.0, 9.0, 6.0, 5.0, 4.0, 4.0).toSparse()), Row.of(2.0, Vectors.dense(8.0, 9.0, 6.0, 4.0, 0.0, 0.0).toSparse())); private static final List<Row> INPUT_ANOVA_DATA = Arrays.asList( Row.of( 1, Vectors.dense( 4.65415496e-03, 1.03550567e-01, -1.17358140e+00, 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)), Row.of( 1, Vectors.dense( -9.01651741e-01, -5.28905302e-01, 1.27636785e+00, 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)), Row.of( 1, Vectors.dense( 3.85692159e-01, -9.04639637e-01, 5.09782604e-02, 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)), Row.of( 1, Vectors.dense( 1.36264353e+00, 2.62454094e-01, 7.96306202e-01, 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)), Row.of( 1, Vectors.dense( 9.65874070e-01, 2.52773665e+00, -2.19380094e+00, 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)), Row.of( 2, Vectors.dense( 1.12324305e+01, 
-2.77121515e-01, 1.12740513e-01, 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)), Row.of( 2, Vectors.dense( 1.06195839e+01, -1.82891238e+00, 2.25085601e-01, 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)), Row.of( 2, Vectors.dense( 1.12806837e+01, 1.30686889e+00, 9.32839108e-02, 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)), Row.of( 2, Vectors.dense( 9.98689462e+00, 9.50808938e-01, -2.90786359e-01, 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)), Row.of( 2, Vectors.dense( 1.08428551e+01, -1.02749936e+00, 1.73951508e-01, 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)), Row.of( 3, Vectors.dense( -1.98641448e+00, 1.12811990e+01, -2.35246756e-01, 8.22809049e-01, 3.26739456e-01, 7.88268404e-01) .toSparse()), Row.of( 3, Vectors.dense( -6.09864090e-01, 1.07346276e+01, -2.18805509e-01, 7.33931213e-01, 1.42554396e-01, 7.11225605e-01) .toSparse()), Row.of( 3, Vectors.dense( -1.58481268e+00, 9.19364039e+00, -5.87490459e-02, 2.51532056e-01, 2.82729807e-01, 7.16245686e-01) .toSparse()), Row.of( 3, Vectors.dense( -2.50949277e-01, 1.12815254e+01, -6.94806734e-01, 5.93898886e-01, 5.68425656e-01, 8.49762330e-01) .toSparse()), Row.of( 3, Vectors.dense( 7.63485129e-01, 1.02605138e+01, 1.32617719e+00, 5.49682879e-01, 8.59931442e-01, 4.88677978e-02) .toSparse()), Row.of( 4, Vectors.dense( 9.34900015e-01, 4.11379043e-01, 8.65010205e+00, 9.23509168e-01, 1.16995043e-01, 5.91894106e-03) .toSparse()), Row.of( 4, Vectors.dense( 4.73734933e-01, -1.48321181e+00, 9.73349621e+00, 4.09421563e-01, 5.09375719e-01, 5.93157850e-01) .toSparse()), Row.of( 4, Vectors.dense( 3.41470679e-01, -6.88972582e-01, 9.60347938e+00, 3.62654055e-01, 2.43437468e-01, 7.13052838e-01) .toSparse()), Row.of( 4, Vectors.dense( -5.29614251e-01, -1.39262856e+00, 1.01354144e+01, 8.24123861e-01, 5.84074506e-01, 6.54461558e-01) .toSparse()), Row.of( 4, Vectors.dense( -2.99454508e-01, 2.20457263e+00, 1.14586015e+01, 5.16336729e-01, 9.99776159e-01, 3.15769738e-01) .toSparse())); private static final List<Row> 
INPUT_FVALUE_DATA = Arrays.asList( Row.of( 0.52516321, Vectors.dense( 0.19151945, 0.62210877, 0.43772774, 0.78535858, 0.77997581, 0.27259261)), Row.of( 0.88275782, Vectors.dense( 0.27646426, 0.80187218, 0.95813935, 0.87593263, 0.35781727, 0.50099513)), Row.of( 0.67524507, Vectors.dense( 0.68346294, 0.71270203, 0.37025075, 0.56119619, 0.50308317, 0.01376845)), Row.of( 0.76734745, Vectors.dense( 0.77282662, 0.88264119, 0.36488598, 0.61539618, 0.07538124, 0.36882401)), Row.of( 0.73909458, Vectors.dense( 0.9331401, 0.65137814, 0.39720258, 0.78873014, 0.31683612, 0.56809865)), Row.of( 0.83628141, Vectors.dense( 0.86912739, 0.43617342, 0.80214764, 0.14376682, 0.70426097, 0.70458131)), Row.of( 0.65665506, Vectors.dense( 0.21879211, 0.92486763, 0.44214076, 0.90931596, 0.05980922, 0.18428708)), Row.of( 0.58147135, Vectors.dense( 0.04735528, 0.67488094, 0.59462478, 0.53331016, 0.04332406, 0.56143308)), Row.of( 0.35603443, Vectors.dense( 0.32966845, 0.50296683, 0.11189432, 0.60719371, 0.56594464, 0.00676406)), Row.of( 0.94534373, Vectors.dense( 0.61744171, 0.91212289, 0.79052413, 0.99208147, 0.95880176, 0.79196414)), Row.of( 0.57458887, Vectors.dense( 0.28525096, 0.62491671, 0.4780938, 0.19567518, 0.38231745, 0.05387369) .toSparse()), Row.of( 0.59026777, Vectors.dense( 0.45164841, 0.98200474, 0.1239427, 0.1193809, 0.73852306, 0.58730363) .toSparse()), Row.of( 0.29894977, Vectors.dense( 0.47163253, 0.10712682, 0.22921857, 0.89996519, 0.41675354, 0.53585166) .toSparse()), Row.of( 0.34056582, Vectors.dense( 0.00620852, 0.30064171, 0.43689317, 0.612149, 0.91819808, 0.62573667) .toSparse()), Row.of( 0.64476446, Vectors.dense( 0.70599757, 0.14983372, 0.74606341, 0.83100699, 0.63372577, 0.43830988) .toSparse()), Row.of( 0.53724782, Vectors.dense( 0.15257277, 0.56840962, 0.52822428, 0.95142876, 0.48035918, 0.50255956) .toSparse()), Row.of( 0.5173021, Vectors.dense( 0.53687819, 0.81920207, 0.05711564, 0.66942174, 0.76711663, 0.70811536) .toSparse()), Row.of( 0.94508275, Vectors.dense( 
0.79686718, 0.55776083, 0.96583653, 0.1471569, 0.029647, 0.59389349) .toSparse()), Row.of( 0.57739736, Vectors.dense( 0.1140657, 0.95080985, 0.96583653, 0.19361869, 0.45781165, 0.92040257) .toSparse()), Row.of( 0.53877145, Vectors.dense( 0.87906916, 0.25261576, 0.34800879, 0.18258873, 0.90179605, 0.70652816) .toSparse())); @Before public void before() { env = TestUtils.getExecutionEnvironment(); tEnv = StreamTableEnvironment.create(env); selectorWithChiSqTest = new UnivariateFeatureSelector() .setFeatureType("categorical") .setLabelType("categorical"); selectorWithANOVATest = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("categorical"); selectorWithFValueTest = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("continuous"); inputChiSqTable = tEnv.fromDataStream( env.fromCollection( INPUT_CHISQ_DATA, Types.ROW(Types.DOUBLE, VectorTypeInfo.INSTANCE))) .as("label", "features"); inputANOVATable = tEnv.fromDataStream( env.fromCollection( INPUT_ANOVA_DATA, Types.ROW(Types.INT, VectorTypeInfo.INSTANCE))) .as("label", "features"); inputFValueTable = tEnv.fromDataStream( env.fromCollection( INPUT_FVALUE_DATA, Types.ROW(Types.DOUBLE, VectorTypeInfo.INSTANCE))) .as("label", "features"); } private void transformAndVerify( UnivariateFeatureSelector selector, Table table, int... expectedIndices) throws Exception { UnivariateFeatureSelectorModel model = selector.fit(table); Table output = model.transform(table)[0]; verifyOutputResult(output, expectedIndices); } private void verifyOutputResult(Table table, int... 
expectedIndices) throws Exception { StreamTableEnvironment tEnv = (StreamTableEnvironment) ((TableImpl) table).getTableEnvironment(); CloseableIterator<Row> rowIterator = tEnv.toDataStream(table).executeAndCollect(); while (rowIterator.hasNext()) { Row row = rowIterator.next(); assertEquals(expectedIndices.length, ((Vector) row.getField("output")).size()); for (int i = 0; i < expectedIndices.length; i++) { assertEquals( ((Vector) row.getField("features")).get(expectedIndices[i]), ((Vector) row.getField("output")).get(i), EPS); } } } @Test public void testParam() { UnivariateFeatureSelector selector = new UnivariateFeatureSelector(); assertEquals("features", selector.getFeaturesCol()); assertEquals("label", selector.getLabelCol()); assertEquals("output", selector.getOutputCol()); assertEquals("numTopFeatures", selector.getSelectionMode()); assertNull(selector.getSelectionThreshold()); selector.setFeaturesCol("test_features") .setLabelCol("test_label") .setOutputCol("test_output") .setFeatureType("continuous") .setLabelType("categorical") .setSelectionMode("fpr") .setSelectionThreshold(0.01); assertEquals("test_features", selector.getFeaturesCol()); assertEquals("test_label", selector.getLabelCol()); assertEquals("test_output", selector.getOutputCol()); assertEquals("continuous", selector.getFeatureType()); assertEquals("categorical", selector.getLabelType()); assertEquals("fpr", selector.getSelectionMode()); assertEquals(0.01, selector.getSelectionThreshold(), EPS); } @Test public void testIncompatibleSelectionModeAndThreshold() { UnivariateFeatureSelector selector = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("categorical") .setSelectionThreshold(50.1); try { selector.fit(inputANOVATable); fail(); } catch (Throwable e) { assertEquals( "SelectionThreshold needs to be a positive Integer " + "for selection mode numTopFeatures, but got 50.1.", e.getMessage()); } try { 
selector.setSelectionMode("fpr").setSelectionThreshold(1.1).fit(inputANOVATable); fail(); } catch (Throwable e) { assertEquals( "SelectionThreshold needs to be in the range [0, 1] " + "for selection mode fpr, but got 1.1.", e.getMessage()); } } @Test public void testOutputSchema() { Table tempTable = inputANOVATable.as("test_label", "test_features"); UnivariateFeatureSelector selector = new UnivariateFeatureSelector() .setLabelCol("test_label") .setFeaturesCol("test_features") .setOutputCol("test_output") .setFeatureType("continuous") .setLabelType("categorical"); UnivariateFeatureSelectorModel model = selector.fit(tempTable); Table output = model.transform(tempTable)[0]; assertEquals( Arrays.asList("test_label", "test_features", "test_output"), output.getResolvedSchema().getColumnNames()); } @Test public void testFitTransformWithNumTopFeatures() throws Exception { transformAndVerify(selectorWithChiSqTest.setSelectionThreshold(2), inputChiSqTable, 0, 1); transformAndVerify(selectorWithANOVATest.setSelectionThreshold(2), inputANOVATable, 0, 2); transformAndVerify(selectorWithFValueTest.setSelectionThreshold(2), inputFValueTable, 0, 2); } @Test public void testFitTransformWithPercentile() throws Exception { transformAndVerify( selectorWithChiSqTest.setSelectionMode("percentile").setSelectionThreshold(0.17), inputChiSqTable, 0); transformAndVerify( selectorWithANOVATest.setSelectionMode("percentile").setSelectionThreshold(0.17), inputANOVATable, 0); transformAndVerify( selectorWithFValueTest.setSelectionMode("percentile").setSelectionThreshold(0.17), inputFValueTable, 2); } @Test public void testFitTransformWithFPR() throws Exception { transformAndVerify( selectorWithChiSqTest.setSelectionMode("fpr").setSelectionThreshold(0.02), inputChiSqTable, 0); transformAndVerify( selectorWithANOVATest.setSelectionMode("fpr").setSelectionThreshold(1.0E-12), inputANOVATable, 0); transformAndVerify( selectorWithFValueTest.setSelectionMode("fpr").setSelectionThreshold(0.01), 
inputFValueTable, 2); } @Test public void testFitTransformWithFDR() throws Exception { transformAndVerify( selectorWithChiSqTest.setSelectionMode("fdr").setSelectionThreshold(0.12), inputChiSqTable, 0); transformAndVerify( selectorWithANOVATest.setSelectionMode("fdr").setSelectionThreshold(6.0E-12), inputANOVATable, 0); transformAndVerify( selectorWithFValueTest.setSelectionMode("fdr").setSelectionThreshold(0.03), inputFValueTable, 2); } @Test public void testFitTransformWithFWE() throws Exception { transformAndVerify( selectorWithChiSqTest.setSelectionMode("fwe").setSelectionThreshold(0.12), inputChiSqTable, 0); transformAndVerify( selectorWithANOVATest.setSelectionMode("fwe").setSelectionThreshold(6.0E-12), inputANOVATable, 0); transformAndVerify( selectorWithFValueTest.setSelectionMode("fwe").setSelectionThreshold(0.03), inputFValueTable, 2); } @Test public void testSaveLoadAndPredict() throws Exception { UnivariateFeatureSelector selector = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("categorical") .setSelectionThreshold(1); UnivariateFeatureSelector loadSelector = TestUtils.saveAndReload( tEnv, selector, tempFolder.newFolder().getAbsolutePath(), UnivariateFeatureSelector::load); UnivariateFeatureSelectorModel model = loadSelector.fit(inputANOVATable); UnivariateFeatureSelectorModel loadedModel = TestUtils.saveAndReload( tEnv, model, tempFolder.newFolder().getAbsolutePath(), UnivariateFeatureSelectorModel::load); assertEquals( Collections.singletonList("indices"), model.getModelData()[0].getResolvedSchema().getColumnNames()); Table output = loadedModel.transform(inputANOVATable)[0]; verifyOutputResult(output, 0); } @Test public void testIncompatibleNumOfFeatures() { UnivariateFeatureSelector selector = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("continuous") .setSelectionThreshold(1); UnivariateFeatureSelectorModel model = selector.fit(inputFValueTable); List<Row> predictData = new ArrayList<>( 
Arrays.asList( Row.of(1, Vectors.dense(1.0, 2.0)), Row.of(-1, Vectors.dense(-1.0, -2.0)))); Table predictTable = tEnv.fromDataStream(env.fromCollection(predictData)).as("label", "features"); Table output = model.transform(predictTable)[0]; try { output.execute().print(); fail(); } catch (Throwable e) { assertEquals( "Input 2 features, but UnivariateFeatureSelector is " + "expecting at least 3 features as input.", ExceptionUtils.getRootCause(e).getMessage()); } } @Test public void testEquivalentPValues() throws Exception { List<Row> inputData = Arrays.asList( Row.of(0.0, Vectors.dense(6.0, 7.0, 0.0, 6.0, 6.0, 6.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 6.0, 0.0, 5.0, 0.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 3.0, 0.0, 5.0, 0.0)), Row.of(1.0, Vectors.dense(0.0, 9.0, 8.0, 0.0, 6.0, 0.0)), Row.of(2.0, Vectors.dense(8.0, 9.0, 6.0, 8.0, 4.0, 8.0)), Row.of(2.0, Vectors.dense(8.0, 9.0, 6.0, 8.0, 0.0, 8.0))); Table inputTable = tEnv.fromDataStream( env.fromCollection( inputData, Types.ROW(Types.DOUBLE, VectorTypeInfo.INSTANCE))) .as("label", "features"); UnivariateFeatureSelectorModel model = selectorWithChiSqTest.setSelectionThreshold(4).fit(inputTable); Table modelData = model.getModelData()[0]; DataStream<Row> output = tEnv.toDataStream(modelData); List<Row> modelRows = IteratorUtils.toList(output.executeAndCollect()); int[] expectedIndices = {0, 3, 5, 1}; assertArrayEquals(expectedIndices, (int[]) modelRows.get(0).getField(0)); } @Test public void testGetModelData() throws Exception { UnivariateFeatureSelector selector = new UnivariateFeatureSelector() .setFeatureType("continuous") .setLabelType("categorical") .setSelectionThreshold(3); UnivariateFeatureSelectorModel model = selector.fit(inputANOVATable); Table modelData = model.getModelData()[0]; assertEquals( Collections.singletonList("indices"), modelData.getResolvedSchema().getColumnNames()); DataStream<Row> output = tEnv.toDataStream(modelData); List<Row> modelRows = 
IteratorUtils.toList(output.executeAndCollect()); int[] expectedIndices = {0, 2, 1}; assertArrayEquals(expectedIndices, (int[]) modelRows.get(0).getField(0)); } @Test public void testSetModelData() throws Exception { UnivariateFeatureSelector selector = selectorWithANOVATest.setSelectionMode("fpr").setSelectionThreshold(1.0E-12); UnivariateFeatureSelectorModel modelA = selector.fit(inputANOVATable); Table modelData = modelA.getModelData()[0]; UnivariateFeatureSelectorModel modelB = new UnivariateFeatureSelectorModel().setModelData(modelData); Table output = modelB.transform(inputANOVATable)[0]; verifyOutputResult(output, 0); } }
googleads/google-ads-java
35,742
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/BatchJobServiceGrpc.java
package com.google.ads.googleads.v19.services; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * <pre> * Service to manage batch jobs. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/ads/googleads/v19/services/batch_job_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class BatchJobServiceGrpc { private BatchJobServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v19.services.BatchJobService"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.MutateBatchJobRequest, com.google.ads.googleads.v19.services.MutateBatchJobResponse> getMutateBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "MutateBatchJob", requestType = com.google.ads.googleads.v19.services.MutateBatchJobRequest.class, responseType = com.google.ads.googleads.v19.services.MutateBatchJobResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.MutateBatchJobRequest, com.google.ads.googleads.v19.services.MutateBatchJobResponse> getMutateBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.MutateBatchJobRequest, com.google.ads.googleads.v19.services.MutateBatchJobResponse> getMutateBatchJobMethod; if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { BatchJobServiceGrpc.getMutateBatchJobMethod = getMutateBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.MutateBatchJobRequest, com.google.ads.googleads.v19.services.MutateBatchJobResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) 
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.MutateBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.MutateBatchJobResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("MutateBatchJob")) .build(); } } } return getMutateBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.ListBatchJobResultsRequest, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListBatchJobResults", requestType = com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.class, responseType = com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.ListBatchJobResultsRequest, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.ListBatchJobResultsRequest, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { BatchJobServiceGrpc.getListBatchJobResultsMethod = getListBatchJobResultsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.ListBatchJobResultsRequest, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse>newBuilder() 
.setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListBatchJobResults")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("ListBatchJobResults")) .build(); } } } return getListBatchJobResultsMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RunBatchJob", requestType = com.google.ads.googleads.v19.services.RunBatchJobRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { BatchJobServiceGrpc.getRunBatchJobMethod = getRunBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.RunBatchJobRequest, com.google.longrunning.Operation>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "RunBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.ads.googleads.v19.services.RunBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("RunBatchJob")) .build(); } } } return getRunBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "AddBatchJobOperations", requestType = com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest.class, responseType = com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { BatchJobServiceGrpc.getAddBatchJobOperationsMethod = getAddBatchJobOperationsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "AddBatchJobOperations")) .setSampledToLocalTracing(true) 
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("AddBatchJobOperations")) .build(); } } } return getAddBatchJobOperationsMethod; } /** * Creates a new async stub that supports all call types for the service */ public static BatchJobServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub>() { @java.lang.Override public BatchJobServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } }; return BatchJobServiceStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports all types of calls on the service */ public static BatchJobServiceBlockingV2Stub newBlockingV2Stub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub>() { @java.lang.Override public BatchJobServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } }; return BatchJobServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static BatchJobServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub>() { @java.lang.Override public BatchJobServiceBlockingStub newStub(io.grpc.Channel channel, 
io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } }; return BatchJobServiceBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static BatchJobServiceFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub>() { @java.lang.Override public BatchJobServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceFutureStub(channel, callOptions); } }; return BatchJobServiceFutureStub.newStub(factory, channel); } /** * <pre> * Service to manage batch jobs. * </pre> */ public interface AsyncService { /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void mutateBatchJob(com.google.ads.googleads.v19.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateBatchJobMethod(), responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void listBatchJobResults(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListBatchJobResultsMethod(), responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void runBatchJob(com.google.ads.googleads.v19.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getRunBatchJobMethod(), responseObserver); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void addBatchJobOperations(com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getAddBatchJobOperationsMethod(), responseObserver); } } /** * Base class for the server implementation of the service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static abstract class BatchJobServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return BatchJobServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceStub extends io.grpc.stub.AbstractAsyncStub<BatchJobServiceStub> { private BatchJobServiceStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void mutateBatchJob(com.google.ads.googleads.v19.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void listBatchJobResults(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void runBatchJob(com.google.ads.googleads.v19.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Add operations to the batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void addBatchJobOperations(com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingV2Stub> { private BatchJobServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v19.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v19.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v19.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRunBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static final class BatchJobServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingStub> { private BatchJobServiceBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v19.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v19.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v19.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRunBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceFutureStub extends io.grpc.stub.AbstractFutureStub<BatchJobServiceFutureStub> { private BatchJobServiceFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceFutureStub(channel, callOptions); } /** * <pre> * Mutates a batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.MutateBatchJobResponse> mutateBatchJob( com.google.ads.googleads.v19.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.ListBatchJobResultsResponse> listBatchJobResults( com.google.ads.googleads.v19.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> runBatchJob( com.google.ads.googleads.v19.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request); } /** * <pre> * Add operations to the batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse> addBatchJobOperations( com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request); } } private static final int METHODID_MUTATE_BATCH_JOB = 0; private static final int METHODID_LIST_BATCH_JOB_RESULTS = 1; private static final int METHODID_RUN_BATCH_JOB = 2; private static final int METHODID_ADD_BATCH_JOB_OPERATIONS = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_MUTATE_BATCH_JOB: serviceImpl.mutateBatchJob((com.google.ads.googleads.v19.services.MutateBatchJobRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.MutateBatchJobResponse>) responseObserver); break; case METHODID_LIST_BATCH_JOB_RESULTS: serviceImpl.listBatchJobResults((com.google.ads.googleads.v19.services.ListBatchJobResultsRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.ListBatchJobResultsResponse>) responseObserver); break; case 
METHODID_RUN_BATCH_JOB: serviceImpl.runBatchJob((com.google.ads.googleads.v19.services.RunBatchJobRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_ADD_BATCH_JOB_OPERATIONS: serviceImpl.addBatchJobOperations((com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getMutateBatchJobMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v19.services.MutateBatchJobRequest, com.google.ads.googleads.v19.services.MutateBatchJobResponse>( service, METHODID_MUTATE_BATCH_JOB))) .addMethod( getListBatchJobResultsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v19.services.ListBatchJobResultsRequest, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse>( service, METHODID_LIST_BATCH_JOB_RESULTS))) .addMethod( getRunBatchJobMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v19.services.RunBatchJobRequest, com.google.longrunning.Operation>( service, METHODID_RUN_BATCH_JOB))) .addMethod( getAddBatchJobOperationsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v19.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v19.services.AddBatchJobOperationsResponse>( service, METHODID_ADD_BATCH_JOB_OPERATIONS))) .build(); } private static 
abstract class BatchJobServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { BatchJobServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.ads.googleads.v19.services.BatchJobServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("BatchJobService"); } } private static final class BatchJobServiceFileDescriptorSupplier extends BatchJobServiceBaseDescriptorSupplier { BatchJobServiceFileDescriptorSupplier() {} } private static final class BatchJobServiceMethodDescriptorSupplier extends BatchJobServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; BatchJobServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (BatchJobServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new BatchJobServiceFileDescriptorSupplier()) .addMethod(getMutateBatchJobMethod()) .addMethod(getListBatchJobResultsMethod()) .addMethod(getRunBatchJobMethod()) .addMethod(getAddBatchJobOperationsMethod()) .build(); } } } return result; } }
googleads/google-ads-java
35,742
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/BatchJobServiceGrpc.java
package com.google.ads.googleads.v20.services; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * <pre> * Service to manage batch jobs. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/ads/googleads/v20/services/batch_job_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class BatchJobServiceGrpc { private BatchJobServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v20.services.BatchJobService"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.MutateBatchJobRequest, com.google.ads.googleads.v20.services.MutateBatchJobResponse> getMutateBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "MutateBatchJob", requestType = com.google.ads.googleads.v20.services.MutateBatchJobRequest.class, responseType = com.google.ads.googleads.v20.services.MutateBatchJobResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.MutateBatchJobRequest, com.google.ads.googleads.v20.services.MutateBatchJobResponse> getMutateBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.MutateBatchJobRequest, com.google.ads.googleads.v20.services.MutateBatchJobResponse> getMutateBatchJobMethod; if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { BatchJobServiceGrpc.getMutateBatchJobMethod = getMutateBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.MutateBatchJobRequest, com.google.ads.googleads.v20.services.MutateBatchJobResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) 
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.MutateBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.MutateBatchJobResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("MutateBatchJob")) .build(); } } } return getMutateBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.ListBatchJobResultsRequest, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListBatchJobResults", requestType = com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.class, responseType = com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.ListBatchJobResultsRequest, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.ListBatchJobResultsRequest, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { BatchJobServiceGrpc.getListBatchJobResultsMethod = getListBatchJobResultsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.ListBatchJobResultsRequest, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse>newBuilder() 
.setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListBatchJobResults")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("ListBatchJobResults")) .build(); } } } return getListBatchJobResultsMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RunBatchJob", requestType = com.google.ads.googleads.v20.services.RunBatchJobRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { BatchJobServiceGrpc.getRunBatchJobMethod = getRunBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.RunBatchJobRequest, com.google.longrunning.Operation>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "RunBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.ads.googleads.v20.services.RunBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("RunBatchJob")) .build(); } } } return getRunBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "AddBatchJobOperations", requestType = com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest.class, responseType = com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { BatchJobServiceGrpc.getAddBatchJobOperationsMethod = getAddBatchJobOperationsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "AddBatchJobOperations")) .setSampledToLocalTracing(true) 
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("AddBatchJobOperations")) .build(); } } } return getAddBatchJobOperationsMethod; } /** * Creates a new async stub that supports all call types for the service */ public static BatchJobServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub>() { @java.lang.Override public BatchJobServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } }; return BatchJobServiceStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports all types of calls on the service */ public static BatchJobServiceBlockingV2Stub newBlockingV2Stub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub>() { @java.lang.Override public BatchJobServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } }; return BatchJobServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static BatchJobServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub>() { @java.lang.Override public BatchJobServiceBlockingStub newStub(io.grpc.Channel channel, 
io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } }; return BatchJobServiceBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static BatchJobServiceFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub>() { @java.lang.Override public BatchJobServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceFutureStub(channel, callOptions); } }; return BatchJobServiceFutureStub.newStub(factory, channel); } /** * <pre> * Service to manage batch jobs. * </pre> */ public interface AsyncService { /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void mutateBatchJob(com.google.ads.googleads.v20.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateBatchJobMethod(), responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void listBatchJobResults(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListBatchJobResultsMethod(), responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void runBatchJob(com.google.ads.googleads.v20.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getRunBatchJobMethod(), responseObserver); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void addBatchJobOperations(com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getAddBatchJobOperationsMethod(), responseObserver); } } /** * Base class for the server implementation of the service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static abstract class BatchJobServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return BatchJobServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceStub extends io.grpc.stub.AbstractAsyncStub<BatchJobServiceStub> { private BatchJobServiceStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void mutateBatchJob(com.google.ads.googleads.v20.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void listBatchJobResults(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void runBatchJob(com.google.ads.googleads.v20.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Add operations to the batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void addBatchJobOperations(com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingV2Stub> { private BatchJobServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v20.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v20.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v20.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRunBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static final class BatchJobServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingStub> { private BatchJobServiceBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v20.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v20.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v20.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRunBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceFutureStub extends io.grpc.stub.AbstractFutureStub<BatchJobServiceFutureStub> { private BatchJobServiceFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceFutureStub(channel, callOptions); } /** * <pre> * Mutates a batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.MutateBatchJobResponse> mutateBatchJob( com.google.ads.googleads.v20.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.ListBatchJobResultsResponse> listBatchJobResults( com.google.ads.googleads.v20.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> runBatchJob( com.google.ads.googleads.v20.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request); } /** * <pre> * Add operations to the batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse> addBatchJobOperations( com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request); } } private static final int METHODID_MUTATE_BATCH_JOB = 0; private static final int METHODID_LIST_BATCH_JOB_RESULTS = 1; private static final int METHODID_RUN_BATCH_JOB = 2; private static final int METHODID_ADD_BATCH_JOB_OPERATIONS = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_MUTATE_BATCH_JOB: serviceImpl.mutateBatchJob((com.google.ads.googleads.v20.services.MutateBatchJobRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.MutateBatchJobResponse>) responseObserver); break; case METHODID_LIST_BATCH_JOB_RESULTS: serviceImpl.listBatchJobResults((com.google.ads.googleads.v20.services.ListBatchJobResultsRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.ListBatchJobResultsResponse>) responseObserver); break; case 
METHODID_RUN_BATCH_JOB: serviceImpl.runBatchJob((com.google.ads.googleads.v20.services.RunBatchJobRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_ADD_BATCH_JOB_OPERATIONS: serviceImpl.addBatchJobOperations((com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest) request, (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getMutateBatchJobMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v20.services.MutateBatchJobRequest, com.google.ads.googleads.v20.services.MutateBatchJobResponse>( service, METHODID_MUTATE_BATCH_JOB))) .addMethod( getListBatchJobResultsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v20.services.ListBatchJobResultsRequest, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse>( service, METHODID_LIST_BATCH_JOB_RESULTS))) .addMethod( getRunBatchJobMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v20.services.RunBatchJobRequest, com.google.longrunning.Operation>( service, METHODID_RUN_BATCH_JOB))) .addMethod( getAddBatchJobOperationsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.ads.googleads.v20.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v20.services.AddBatchJobOperationsResponse>( service, METHODID_ADD_BATCH_JOB_OPERATIONS))) .build(); } private static 
abstract class BatchJobServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { BatchJobServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.ads.googleads.v20.services.BatchJobServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("BatchJobService"); } } private static final class BatchJobServiceFileDescriptorSupplier extends BatchJobServiceBaseDescriptorSupplier { BatchJobServiceFileDescriptorSupplier() {} } private static final class BatchJobServiceMethodDescriptorSupplier extends BatchJobServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; BatchJobServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (BatchJobServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new BatchJobServiceFileDescriptorSupplier()) .addMethod(getMutateBatchJobMethod()) .addMethod(getListBatchJobResultsMethod()) .addMethod(getRunBatchJobMethod()) .addMethod(getAddBatchJobOperationsMethod()) .build(); } } } return result; } }
googleads/google-ads-java
35,742
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/BatchJobServiceGrpc.java
package com.google.ads.googleads.v21.services; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * <pre> * Service to manage batch jobs. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/ads/googleads/v21/services/batch_job_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class BatchJobServiceGrpc { private BatchJobServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v21.services.BatchJobService"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.MutateBatchJobRequest, com.google.ads.googleads.v21.services.MutateBatchJobResponse> getMutateBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "MutateBatchJob", requestType = com.google.ads.googleads.v21.services.MutateBatchJobRequest.class, responseType = com.google.ads.googleads.v21.services.MutateBatchJobResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.MutateBatchJobRequest, com.google.ads.googleads.v21.services.MutateBatchJobResponse> getMutateBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.MutateBatchJobRequest, com.google.ads.googleads.v21.services.MutateBatchJobResponse> getMutateBatchJobMethod; if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getMutateBatchJobMethod = BatchJobServiceGrpc.getMutateBatchJobMethod) == null) { BatchJobServiceGrpc.getMutateBatchJobMethod = getMutateBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.MutateBatchJobRequest, com.google.ads.googleads.v21.services.MutateBatchJobResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) 
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.MutateBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.MutateBatchJobResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("MutateBatchJob")) .build(); } } } return getMutateBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.ListBatchJobResultsRequest, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListBatchJobResults", requestType = com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.class, responseType = com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.ListBatchJobResultsRequest, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.ListBatchJobResultsRequest, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> getListBatchJobResultsMethod; if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getListBatchJobResultsMethod = BatchJobServiceGrpc.getListBatchJobResultsMethod) == null) { BatchJobServiceGrpc.getListBatchJobResultsMethod = getListBatchJobResultsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.ListBatchJobResultsRequest, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse>newBuilder() 
.setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListBatchJobResults")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("ListBatchJobResults")) .build(); } } } return getListBatchJobResultsMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RunBatchJob", requestType = com.google.ads.googleads.v21.services.RunBatchJobRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.RunBatchJobRequest, com.google.longrunning.Operation> getRunBatchJobMethod; if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getRunBatchJobMethod = BatchJobServiceGrpc.getRunBatchJobMethod) == null) { BatchJobServiceGrpc.getRunBatchJobMethod = getRunBatchJobMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.RunBatchJobRequest, com.google.longrunning.Operation>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "RunBatchJob")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.ads.googleads.v21.services.RunBatchJobRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("RunBatchJob")) .build(); } } } return getRunBatchJobMethod; } private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "AddBatchJobOperations", requestType = com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest.class, responseType = com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod() { io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> getAddBatchJobOperationsMethod; if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { synchronized (BatchJobServiceGrpc.class) { if ((getAddBatchJobOperationsMethod = BatchJobServiceGrpc.getAddBatchJobOperationsMethod) == null) { BatchJobServiceGrpc.getAddBatchJobOperationsMethod = getAddBatchJobOperationsMethod = io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest, com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "AddBatchJobOperations")) .setSampledToLocalTracing(true) 
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse.getDefaultInstance())) .setSchemaDescriptor(new BatchJobServiceMethodDescriptorSupplier("AddBatchJobOperations")) .build(); } } } return getAddBatchJobOperationsMethod; } /** * Creates a new async stub that supports all call types for the service */ public static BatchJobServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceStub>() { @java.lang.Override public BatchJobServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } }; return BatchJobServiceStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports all types of calls on the service */ public static BatchJobServiceBlockingV2Stub newBlockingV2Stub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingV2Stub>() { @java.lang.Override public BatchJobServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } }; return BatchJobServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static BatchJobServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceBlockingStub>() { @java.lang.Override public BatchJobServiceBlockingStub newStub(io.grpc.Channel channel, 
io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } }; return BatchJobServiceBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static BatchJobServiceFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<BatchJobServiceFutureStub>() { @java.lang.Override public BatchJobServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceFutureStub(channel, callOptions); } }; return BatchJobServiceFutureStub.newStub(factory, channel); } /** * <pre> * Service to manage batch jobs. * </pre> */ public interface AsyncService { /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void mutateBatchJob(com.google.ads.googleads.v21.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateBatchJobMethod(), responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void listBatchJobResults(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListBatchJobResultsMethod(), responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ default void runBatchJob(com.google.ads.googleads.v21.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getRunBatchJobMethod(), responseObserver); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ default void addBatchJobOperations(com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getAddBatchJobOperationsMethod(), responseObserver); } } /** * Base class for the server implementation of the service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static abstract class BatchJobServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return BatchJobServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceStub extends io.grpc.stub.AbstractAsyncStub<BatchJobServiceStub> { private BatchJobServiceStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void mutateBatchJob(com.google.ads.googleads.v21.services.MutateBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.MutateBatchJobResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void listBatchJobResults(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public void runBatchJob(com.google.ads.googleads.v21.services.RunBatchJobRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request, responseObserver); } /** * <pre> * Add operations to the batch job. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public void addBatchJobOperations(com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest request, io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. * </pre> */ public static final class BatchJobServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingV2Stub> { private BatchJobServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingV2Stub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v21.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v21.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. 
* List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v21.services.RunBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRunBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Add operations to the batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service BatchJobService. * <pre> * Service to manage batch jobs. 
* </pre> */ public static final class BatchJobServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<BatchJobServiceBlockingStub> { private BatchJobServiceBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected BatchJobServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new BatchJobServiceBlockingStub(channel, callOptions); } /** * <pre> * Mutates a batch job. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * [ResourceCountLimitExceededError]() * </pre> */ public com.google.ads.googleads.v21.services.MutateBatchJobResponse mutateBatchJob(com.google.ads.googleads.v21.services.MutateBatchJobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getMutateBatchJobMethod(), getCallOptions(), request); } /** * <pre> * Returns the results of the batch job. The job must be done. * Supports standard list paging. * List of thrown errors: * [AuthenticationError]() * [AuthorizationError]() * [BatchJobError]() * [HeaderError]() * [InternalError]() * [QuotaError]() * [RequestError]() * </pre> */ public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse listBatchJobResults(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListBatchJobResultsMethod(), getCallOptions(), request); } /** * <pre> * Runs the batch job. * The Operation.metadata field type is BatchJobMetadata. When finished, the * long running operation will not contain errors or a response. Instead, use * ListBatchJobResults to get the results of the job. 
* List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [BatchJobError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   * </pre>
   */
  public com.google.longrunning.Operation runBatchJob(com.google.ads.googleads.v21.services.RunBatchJobRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getRunBatchJobMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Add operations to the batch job.
   * List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [BatchJobError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   *   [ResourceCountLimitExceededError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse addBatchJobOperations(com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getAddBatchJobOperationsMethod(), getCallOptions(), request);
  }
}

/**
 * A stub to allow clients to do ListenableFuture-style rpc calls to service BatchJobService.
 * <pre>
 * Service to manage batch jobs.
 * </pre>
 */
public static final class BatchJobServiceFutureStub
    extends io.grpc.stub.AbstractFutureStub<BatchJobServiceFutureStub> {

  // Private: instances are obtained via the outer class's stub factory methods.
  private BatchJobServiceFutureStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  /** Rebuilds this stub with new call options (required by AbstractStub). */
  @java.lang.Override
  protected BatchJobServiceFutureStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new BatchJobServiceFutureStub(channel, callOptions);
  }

  /**
   * <pre>
   * Mutates a batch job.
* List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   *   [ResourceCountLimitExceededError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.MutateBatchJobResponse> mutateBatchJob(
      com.google.ads.googleads.v21.services.MutateBatchJobRequest request) {
    // Asynchronous unary call: the returned future completes with the response
    // or fails with the RPC's status exception.
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getMutateBatchJobMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Returns the results of the batch job. The job must be done.
   * Supports standard list paging.
   * List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [BatchJobError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.ListBatchJobResultsResponse> listBatchJobResults(
      com.google.ads.googleads.v21.services.ListBatchJobResultsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getListBatchJobResultsMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Runs the batch job.
   * The Operation.metadata field type is BatchJobMetadata. When finished, the
   * long running operation will not contain errors or a response. Instead, use
   * ListBatchJobResults to get the results of the job.
   * List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [BatchJobError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> runBatchJob(
      com.google.ads.googleads.v21.services.RunBatchJobRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getRunBatchJobMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Add operations to the batch job.
* List of thrown errors:
   *   [AuthenticationError]()
   *   [AuthorizationError]()
   *   [BatchJobError]()
   *   [HeaderError]()
   *   [InternalError]()
   *   [QuotaError]()
   *   [RequestError]()
   *   [ResourceCountLimitExceededError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse> addBatchJobOperations(
      com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getAddBatchJobOperationsMethod(), getCallOptions()), request);
  }
}

// Method ids used by MethodHandlers to dispatch an incoming call to the
// matching AsyncService implementation method.
private static final int METHODID_MUTATE_BATCH_JOB = 0;
private static final int METHODID_LIST_BATCH_JOB_RESULTS = 1;
private static final int METHODID_RUN_BATCH_JOB = 2;
private static final int METHODID_ADD_BATCH_JOB_OPERATIONS = 3;

/**
 * Server-side dispatcher: one instance is created per RPC method (see
 * bindService), and {@code methodId} selects which {@code AsyncService}
 * method handles the request.
 */
private static final class MethodHandlers<Req, Resp> implements
    io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
  private final AsyncService serviceImpl;
  private final int methodId;

  MethodHandlers(AsyncService serviceImpl, int methodId) {
    this.serviceImpl = serviceImpl;
    this.methodId = methodId;
  }

  // Unary/server-streaming entry point. The unchecked casts are safe because
  // bindService pairs each methodId with its exact request/response types.
  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
    switch (methodId) {
      case METHODID_MUTATE_BATCH_JOB:
        serviceImpl.mutateBatchJob((com.google.ads.googleads.v21.services.MutateBatchJobRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.MutateBatchJobResponse>) responseObserver);
        break;
      case METHODID_LIST_BATCH_JOB_RESULTS:
        serviceImpl.listBatchJobResults((com.google.ads.googleads.v21.services.ListBatchJobResultsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.ListBatchJobResultsResponse>) responseObserver);
        break;
      // switch continues on the next source line of this chunk
      case
METHODID_RUN_BATCH_JOB:
        serviceImpl.runBatchJob((com.google.ads.googleads.v21.services.RunBatchJobRequest) request,
            (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
        break;
      case METHODID_ADD_BATCH_JOB_OPERATIONS:
        serviceImpl.addBatchJobOperations((com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse>) responseObserver);
        break;
      default:
        // Unreachable: methodId is always one of the METHODID_* constants above.
        throw new AssertionError();
    }
  }

  // Client-streaming/bidi entry point. This service has no streaming methods,
  // so no case matches and this overload always throws.
  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public io.grpc.stub.StreamObserver<Req> invoke(
      io.grpc.stub.StreamObserver<Resp> responseObserver) {
    switch (methodId) {
      default:
        throw new AssertionError();
    }
  }
}

/**
 * Builds the server-side service definition, wiring each method descriptor to
 * a MethodHandlers instance that dispatches to the given AsyncService.
 */
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
  return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
      .addMethod(
        getMutateBatchJobMethod(),
        io.grpc.stub.ServerCalls.asyncUnaryCall(
          new MethodHandlers<
            com.google.ads.googleads.v21.services.MutateBatchJobRequest,
            com.google.ads.googleads.v21.services.MutateBatchJobResponse>(
              service, METHODID_MUTATE_BATCH_JOB)))
      .addMethod(
        getListBatchJobResultsMethod(),
        io.grpc.stub.ServerCalls.asyncUnaryCall(
          new MethodHandlers<
            com.google.ads.googleads.v21.services.ListBatchJobResultsRequest,
            com.google.ads.googleads.v21.services.ListBatchJobResultsResponse>(
              service, METHODID_LIST_BATCH_JOB_RESULTS)))
      .addMethod(
        getRunBatchJobMethod(),
        io.grpc.stub.ServerCalls.asyncUnaryCall(
          new MethodHandlers<
            com.google.ads.googleads.v21.services.RunBatchJobRequest,
            com.google.longrunning.Operation>(
              service, METHODID_RUN_BATCH_JOB)))
      .addMethod(
        getAddBatchJobOperationsMethod(),
        io.grpc.stub.ServerCalls.asyncUnaryCall(
          new MethodHandlers<
            com.google.ads.googleads.v21.services.AddBatchJobOperationsRequest,
            com.google.ads.googleads.v21.services.AddBatchJobOperationsResponse>(
              service, METHODID_ADD_BATCH_JOB_OPERATIONS)))
      .build();
}

// Declaration continues on the next source line of this chunk
// ("abstract class BatchJobServiceBaseDescriptorSupplier ...").
private static
abstract class BatchJobServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
  BatchJobServiceBaseDescriptorSupplier() {}

  /** File descriptor of the .proto file that declares this service. */
  @java.lang.Override
  public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
    return com.google.ads.googleads.v21.services.BatchJobServiceProto.getDescriptor();
  }

  /** Service descriptor, looked up by name within the file descriptor. */
  @java.lang.Override
  public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
    return getFileDescriptor().findServiceByName("BatchJobService");
  }
}

/** Supplies file-level proto metadata for reflection/debugging. */
private static final class BatchJobServiceFileDescriptorSupplier
    extends BatchJobServiceBaseDescriptorSupplier {
  BatchJobServiceFileDescriptorSupplier() {}
}

/** Supplies per-method proto metadata, keyed by the method's simple name. */
private static final class BatchJobServiceMethodDescriptorSupplier
    extends BatchJobServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
  private final java.lang.String methodName;

  BatchJobServiceMethodDescriptorSupplier(java.lang.String methodName) {
    this.methodName = methodName;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
    return getServiceDescriptor().findMethodByName(methodName);
  }
}

// Lazily built; volatile + double-checked locking publishes the fully
// constructed descriptor safely to other threads.
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

public static io.grpc.ServiceDescriptor getServiceDescriptor() {
  io.grpc.ServiceDescriptor result = serviceDescriptor;
  if (result == null) {
    synchronized (BatchJobServiceGrpc.class) {
      result = serviceDescriptor;
      if (result == null) {
        serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
            .setSchemaDescriptor(new BatchJobServiceFileDescriptorSupplier())
            .addMethod(getMutateBatchJobMethod())
            .addMethod(getListBatchJobResultsMethod())
            .addMethod(getRunBatchJobMethod())
            .addMethod(getAddBatchJobOperationsMethod())
            .build();
      }
    }
  }
  return result;
}
}
// ---------------------------------------------------------------------------
// NOTE(review): the three lines below are dataset-concatenation metadata, not
// Java source. They identify the next file appended to this blob:
//   repo: googleapis/google-cloud-java
//   size: 35413
//   path: java-notebooks/proto-google-cloud-notebooks-v1beta1/src/main/java/com/google/cloud/notebooks/v1beta1/CreateInstanceRequest.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/notebooks/v1beta1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.notebooks.v1beta1; /** * * * <pre> * Request for creating a notebook instance. * </pre> * * Protobuf type {@code google.cloud.notebooks.v1beta1.CreateInstanceRequest} */ public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1beta1.CreateInstanceRequest) CreateInstanceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateInstanceRequest.newBuilder() to construct. 
private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateInstanceRequest() { parent_ = ""; instanceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateInstanceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1beta1.NotebooksProto .internal_static_google_cloud_notebooks_v1beta1_CreateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1beta1.NotebooksProto .internal_static_google_cloud_notebooks_v1beta1_CreateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.class, com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instanceId. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_FIELD_NUMBER = 3; private com.google.cloud.notebooks.v1beta1.Instance instance_; /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. 
*/ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ @java.lang.Override public com.google.cloud.notebooks.v1beta1.Instance getInstance() { return instance_ == null ? com.google.cloud.notebooks.v1beta1.Instance.getDefaultInstance() : instance_; } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.notebooks.v1beta1.InstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? com.google.cloud.notebooks.v1beta1.Instance.getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.notebooks.v1beta1.CreateInstanceRequest)) { return super.equals(obj); } com.google.cloud.notebooks.v1beta1.CreateInstanceRequest other = (com.google.cloud.notebooks.v1beta1.CreateInstanceRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getInstanceId().equals(other.getInstanceId())) return false; if (hasInstance() != other.hasInstance()) return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.notebooks.v1beta1.CreateInstanceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for creating a notebook instance. 
* </pre> * * Protobuf type {@code google.cloud.notebooks.v1beta1.CreateInstanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1beta1.CreateInstanceRequest) com.google.cloud.notebooks.v1beta1.CreateInstanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1beta1.NotebooksProto .internal_static_google_cloud_notebooks_v1beta1_CreateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1beta1.NotebooksProto .internal_static_google_cloud_notebooks_v1beta1_CreateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.class, com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.Builder.class); } // Construct using com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getInstanceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; instanceId_ = ""; instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.notebooks.v1beta1.NotebooksProto .internal_static_google_cloud_notebooks_v1beta1_CreateInstanceRequest_descriptor; } @java.lang.Override public 
com.google.cloud.notebooks.v1beta1.CreateInstanceRequest getDefaultInstanceForType() { return com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.notebooks.v1beta1.CreateInstanceRequest build() { com.google.cloud.notebooks.v1beta1.CreateInstanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.notebooks.v1beta1.CreateInstanceRequest buildPartial() { com.google.cloud.notebooks.v1beta1.CreateInstanceRequest result = new com.google.cloud.notebooks.v1beta1.CreateInstanceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.notebooks.v1beta1.CreateInstanceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instanceId_ = instanceId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.instance_ = instanceBuilder_ == null ? 
instance_ : instanceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.notebooks.v1beta1.CreateInstanceRequest) { return mergeFrom((com.google.cloud.notebooks.v1beta1.CreateInstanceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.notebooks.v1beta1.CreateInstanceRequest other) { if (other == com.google.cloud.notebooks.v1beta1.CreateInstanceRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasInstance()) { mergeInstance(other.getInstance()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Format: * `parent=projects/{project_id}/locations/{location}` * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object instanceId_ = ""; /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instanceId. 
*/ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. User-defined unique ID of this instance. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for instanceId to set. * @return This builder for chaining. 
*/ public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.notebooks.v1beta1.Instance instance_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.notebooks.v1beta1.Instance, com.google.cloud.notebooks.v1beta1.Instance.Builder, com.google.cloud.notebooks.v1beta1.InstanceOrBuilder> instanceBuilder_; /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ public boolean hasInstance() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ public com.google.cloud.notebooks.v1beta1.Instance getInstance() { if (instanceBuilder_ == null) { return instance_ == null ? com.google.cloud.notebooks.v1beta1.Instance.getDefaultInstance() : instance_; } else { return instanceBuilder_.getMessage(); } } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance(com.google.cloud.notebooks.v1beta1.Instance value) { if (instanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } instance_ = value; } else { instanceBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The instance to be created. 
* </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance( com.google.cloud.notebooks.v1beta1.Instance.Builder builderForValue) { if (instanceBuilder_ == null) { instance_ = builderForValue.build(); } else { instanceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeInstance(com.google.cloud.notebooks.v1beta1.Instance value) { if (instanceBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && instance_ != null && instance_ != com.google.cloud.notebooks.v1beta1.Instance.getDefaultInstance()) { getInstanceBuilder().mergeFrom(value); } else { instance_ = value; } } else { instanceBuilder_.mergeFrom(value); } if (instance_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000004); instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.notebooks.v1beta1.Instance.Builder getInstanceBuilder() { bitField0_ |= 0x00000004; onChanged(); return getInstanceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The instance to be created. 
* </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.notebooks.v1beta1.InstanceOrBuilder getInstanceOrBuilder() { if (instanceBuilder_ != null) { return instanceBuilder_.getMessageOrBuilder(); } else { return instance_ == null ? com.google.cloud.notebooks.v1beta1.Instance.getDefaultInstance() : instance_; } } /** * * * <pre> * Required. The instance to be created. * </pre> * * <code> * .google.cloud.notebooks.v1beta1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.notebooks.v1beta1.Instance, com.google.cloud.notebooks.v1beta1.Instance.Builder, com.google.cloud.notebooks.v1beta1.InstanceOrBuilder> getInstanceFieldBuilder() { if (instanceBuilder_ == null) { instanceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.notebooks.v1beta1.Instance, com.google.cloud.notebooks.v1beta1.Instance.Builder, com.google.cloud.notebooks.v1beta1.InstanceOrBuilder>( getInstance(), getParentForChildren(), isClean()); instance_ = null; } return instanceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1beta1.CreateInstanceRequest) } // @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1beta1.CreateInstanceRequest) private static final com.google.cloud.notebooks.v1beta1.CreateInstanceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1beta1.CreateInstanceRequest(); } public static com.google.cloud.notebooks.v1beta1.CreateInstanceRequest getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER = new com.google.protobuf.AbstractParser<CreateInstanceRequest>() { @java.lang.Override public CreateInstanceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateInstanceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.notebooks.v1beta1.CreateInstanceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,500
java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/ExpandedDataSetFilterExpressionList.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/analytics/admin/v1alpha/expanded_data_set.proto

// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;

/**
 * Immutable protobuf message wrapping a repeated list of
 * {@code ExpandedDataSetFilterExpression}.
 *
 * <pre>
 * A list of ExpandedDataSet filter expressions.
 * </pre>
 *
 * Protobuf type {@code google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList}
 */
public final class ExpandedDataSetFilterExpressionList extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList)
    ExpandedDataSetFilterExpressionListOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ExpandedDataSetFilterExpressionList.newBuilder() to construct.
  private ExpandedDataSetFilterExpressionList(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: repeated field starts as an immutable empty list.
  private ExpandedDataSetFilterExpressionList() {
    filterExpressions_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExpandedDataSetFilterExpressionList();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.ExpandedDataSetProto
        .internal_static_google_analytics_admin_v1alpha_ExpandedDataSetFilterExpressionList_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1alpha.ExpandedDataSetProto
        .internal_static_google_analytics_admin_v1alpha_ExpandedDataSetFilterExpressionList_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList.class,
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList.Builder.class);
  }

  public static final int FILTER_EXPRESSIONS_FIELD_NUMBER = 1;

  // Backing storage for the repeated field; on a built message this list is
  // either Collections.emptyList() or an unmodifiable list (see buildPartialRepeatedFields).
  @SuppressWarnings("serial")
  private java.util.List<com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>
      filterExpressions_;

  /**
   *
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>
      getFilterExpressionsList() {
    return filterExpressions_;
  }

  /**
   *
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder>
      getFilterExpressionsOrBuilderList() {
    return filterExpressions_;
  }

  /**
   *
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
   * </code>
   */
  @java.lang.Override
  public int getFilterExpressionsCount() {
    return filterExpressions_.size();
  }

  /**
   *
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression getFilterExpressions(
      int index) {
    return filterExpressions_.get(index);
  }

  /**
   *
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder
      getFilterExpressionsOrBuilder(int index) {
    return filterExpressions_.get(index);
  }

  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < filterExpressions_.size(); i++) {
      output.writeMessage(1, filterExpressions_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the wire size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < filterExpressions_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, filterExpressions_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList other =
        (com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList) obj;

    if (!getFilterExpressionsList().equals(other.getFilterExpressionsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getFilterExpressionsCount() > 0) {
      hash = (37 * hash) + FILTER_EXPRESSIONS_FIELD_NUMBER;
      hash = (53 * hash) + getFilterExpressionsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Mutable builder for {@code ExpandedDataSetFilterExpressionList}.
   *
   * <pre>
   * A list of ExpandedDataSet filter expressions.
   * </pre>
   *
   * Protobuf type {@code google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList)
      com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionListOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.ExpandedDataSetProto
          .internal_static_google_analytics_admin_v1alpha_ExpandedDataSetFilterExpressionList_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1alpha.ExpandedDataSetProto
          .internal_static_google_analytics_admin_v1alpha_ExpandedDataSetFilterExpressionList_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList.class,
              com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList.Builder.class);
    }

    // Construct using
    // com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Repeated field is held either as a plain list or behind a nested
      // RepeatedFieldBuilderV3 — exactly one representation is active at a time.
      if (filterExpressionsBuilder_ == null) {
        filterExpressions_ = java.util.Collections.emptyList();
      } else {
        filterExpressions_ = null;
        filterExpressionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.ExpandedDataSetProto
          .internal_static_google_analytics_admin_v1alpha_ExpandedDataSetFilterExpressionList_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
        getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList build() {
      com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList buildPartial() {
      com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList result =
          new com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList result) {
      if (filterExpressionsBuilder_ == null) {
        // Freeze the plain list so the built message stays immutable; clearing the
        // mutability bit lets this builder be reused without aliasing the result.
        if (((bitField0_ & 0x00000001) != 0)) {
          filterExpressions_ = java.util.Collections.unmodifiableList(filterExpressions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.filterExpressions_ = filterExpressions_;
      } else {
        result.filterExpressions_ = filterExpressionsBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList) {
        return mergeFrom(
            (com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList other) {
      if (other
          == com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
              .getDefaultInstance()) return this;
      if (filterExpressionsBuilder_ == null) {
        if (!other.filterExpressions_.isEmpty()) {
          if (filterExpressions_.isEmpty()) {
            // Share other's (immutable) list directly; drop the mutability bit.
            filterExpressions_ = other.filterExpressions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureFilterExpressionsIsMutable();
            filterExpressions_.addAll(other.filterExpressions_);
          }
          onChanged();
        }
      } else {
        if (!other.filterExpressions_.isEmpty()) {
          if (filterExpressionsBuilder_.isEmpty()) {
            // Builder representation is empty: discard it and adopt other's list,
            // re-creating the field builder only if the runtime always uses one.
            filterExpressionsBuilder_.dispose();
            filterExpressionsBuilder_ = null;
            filterExpressions_ = other.filterExpressions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            filterExpressionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getFilterExpressionsFieldBuilder()
                    : null;
          } else {
            filterExpressionsBuilder_.addAllMessages(other.filterExpressions_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression m =
                    input.readMessage(
                        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.parser(),
                        extensionRegistry);
                if (filterExpressionsBuilder_ == null) {
                  ensureFilterExpressionsIsMutable();
                  filterExpressions_.add(m);
                } else {
                  filterExpressionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks whether filterExpressions_ is a private mutable copy.
    private int bitField0_;

    private java.util.List<com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>
        filterExpressions_ = java.util.Collections.emptyList();

    private void ensureFilterExpressionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        filterExpressions_ =
            new java.util.ArrayList<
                com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>(
                filterExpressions_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested builder; once non-null it owns the repeated field
    // and filterExpressions_ is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression,
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder,
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder>
        filterExpressionsBuilder_;

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>
        getFilterExpressionsList() {
      if (filterExpressionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(filterExpressions_);
      } else {
        return filterExpressionsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public int getFilterExpressionsCount() {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.size();
      } else {
        return filterExpressionsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression getFilterExpressions(
        int index) {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.get(index);
      } else {
        return filterExpressionsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder setFilterExpressions(
        int index, com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.set(index, value);
        onChanged();
      } else {
        filterExpressionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder setFilterExpressions(
        int index,
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
            builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.set(index, builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(value);
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        int index, com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(index, value);
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
            builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        int index,
        com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
            builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(index, builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addAllFilterExpressions(
        java.lang.Iterable<
                ? extends com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression>
            values) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, filterExpressions_);
        onChanged();
      } else {
        filterExpressionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder clearFilterExpressions() {
      if (filterExpressionsBuilder_ == null) {
        filterExpressions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        filterExpressionsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder removeFilterExpressions(int index) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.remove(index);
        onChanged();
      } else {
        filterExpressionsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
        getFilterExpressionsBuilder(int index) {
      return getFilterExpressionsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder
        getFilterExpressionsOrBuilder(int index) {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.get(index);
      } else {
        return filterExpressionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<
            ? extends com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder>
        getFilterExpressionsOrBuilderList() {
      if (filterExpressionsBuilder_ != null) {
        return filterExpressionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(filterExpressions_);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
        addFilterExpressionsBuilder() {
      return getFilterExpressionsFieldBuilder()
          .addBuilder(
              com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression
                  .getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder
        addFilterExpressionsBuilder(int index) {
      return getFilterExpressionsFieldBuilder()
          .addBuilder(
              index,
              com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression
                  .getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of ExpandedDataSet filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder>
        getFilterExpressionsBuilderList() {
      return getFilterExpressionsFieldBuilder().getBuilderList();
    }

    // Switches the repeated field to the builder representation on first use;
    // after this, filterExpressions_ is nulled out and the field builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression,
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder,
            com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder>
        getFilterExpressionsFieldBuilder() {
      if (filterExpressionsBuilder_ == null) {
        filterExpressionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression,
                com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpression.Builder,
                com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionOrBuilder>(
                filterExpressions_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        filterExpressions_ = null;
      }
      return filterExpressionsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList)
  private static final com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList();
  }

  public static com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ExpandedDataSetFilterExpressionList> PARSER =
      new com.google.protobuf.AbstractParser<ExpandedDataSetFilterExpressionList>() {
        @java.lang.Override
        public ExpandedDataSetFilterExpressionList parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ExpandedDataSetFilterExpressionList> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ExpandedDataSetFilterExpressionList> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.admin.v1alpha.ExpandedDataSetFilterExpressionList
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/lucene
35,577
lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.codecs.lucene90.compressing; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; import java.io.IOException; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Deque; import java.util.Iterator; import java.util.List; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.TermVectorsReader; import org.apache.lucene.codecs.TermVectorsWriter; import org.apache.lucene.codecs.compressing.CompressionMode; import org.apache.lucene.codecs.compressing.Compressor; import org.apache.lucene.codecs.compressing.MatchingReaders; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocIDMerger; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.store.ByteBuffersDataInput; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.Directory; 
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.packed.BlockPackedWriter; import org.apache.lucene.util.packed.DirectWriter; import org.apache.lucene.util.packed.PackedInts; /** * {@link TermVectorsWriter} for {@link Lucene90CompressingTermVectorsFormat}. * * @lucene.experimental */ public final class Lucene90CompressingTermVectorsWriter extends TermVectorsWriter { static final String VECTORS_EXTENSION = "tvd"; static final String VECTORS_INDEX_EXTENSION = "tvx"; static final String VECTORS_META_EXTENSION = "tvm"; static final String VECTORS_INDEX_CODEC_NAME = "Lucene90TermVectorsIndex"; static final int VERSION_START = 0; static final int VERSION_CURRENT = VERSION_START; static final int META_VERSION_START = 0; static final int PACKED_BLOCK_SIZE = 64; static final int POSITIONS = 0x01; static final int OFFSETS = 0x02; static final int PAYLOADS = 0x04; static final int FLAGS_BITS = DirectWriter.bitsRequired(POSITIONS | OFFSETS | PAYLOADS); private final String segment; private FieldsIndexWriter indexWriter; private IndexOutput metaStream, vectorsStream; private final CompressionMode compressionMode; private final Compressor compressor; private final int chunkSize; private long numChunks; // number of chunks private long numDirtyChunks; // number of incomplete compressed blocks written private long numDirtyDocs; // cumulative number of docs in incomplete chunks /** a pending doc */ private class DocData { final int numFields; final Deque<FieldData> fields; final int posStart, offStart, payStart; DocData(int numFields, int posStart, int offStart, int payStart) { this.numFields = numFields; this.fields = new ArrayDeque<>(numFields); this.posStart = 
posStart; this.offStart = offStart; this.payStart = payStart; } FieldData addField( int fieldNum, int numTerms, boolean positions, boolean offsets, boolean payloads) { final FieldData field; if (fields.isEmpty()) { field = new FieldData( fieldNum, numTerms, positions, offsets, payloads, posStart, offStart, payStart); } else { final FieldData last = fields.getLast(); final int posStart = last.posStart + (last.hasPositions ? last.totalPositions : 0); final int offStart = last.offStart + (last.hasOffsets ? last.totalPositions : 0); final int payStart = last.payStart + (last.hasPayloads ? last.totalPositions : 0); field = new FieldData( fieldNum, numTerms, positions, offsets, payloads, posStart, offStart, payStart); } fields.add(field); return field; } } private DocData addDocData(int numVectorFields) { FieldData last = null; for (Iterator<DocData> it = pendingDocs.descendingIterator(); it.hasNext(); ) { final DocData doc = it.next(); if (!doc.fields.isEmpty()) { last = doc.fields.getLast(); break; } } final DocData doc; if (last == null) { doc = new DocData(numVectorFields, 0, 0, 0); } else { final int posStart = last.posStart + (last.hasPositions ? last.totalPositions : 0); final int offStart = last.offStart + (last.hasOffsets ? last.totalPositions : 0); final int payStart = last.payStart + (last.hasPayloads ? 
last.totalPositions : 0); doc = new DocData(numVectorFields, posStart, offStart, payStart); } pendingDocs.add(doc); return doc; } /** a pending field */ private class FieldData { final boolean hasPositions, hasOffsets, hasPayloads; final int fieldNum, flags, numTerms; final int[] freqs, prefixLengths, suffixLengths; final int posStart, offStart, payStart; int totalPositions; int ord; FieldData( int fieldNum, int numTerms, boolean positions, boolean offsets, boolean payloads, int posStart, int offStart, int payStart) { this.fieldNum = fieldNum; this.numTerms = numTerms; this.hasPositions = positions; this.hasOffsets = offsets; this.hasPayloads = payloads; this.flags = (positions ? POSITIONS : 0) | (offsets ? OFFSETS : 0) | (payloads ? PAYLOADS : 0); this.freqs = new int[numTerms]; this.prefixLengths = new int[numTerms]; this.suffixLengths = new int[numTerms]; this.posStart = posStart; this.offStart = offStart; this.payStart = payStart; totalPositions = 0; ord = 0; } void addTerm(int freq, int prefixLength, int suffixLength) { freqs[ord] = freq; prefixLengths[ord] = prefixLength; suffixLengths[ord] = suffixLength; ++ord; } void addPosition(int position, int startOffset, int length, int payloadLength) { if (hasPositions) { if (posStart + totalPositions == positionsBuf.length) { positionsBuf = ArrayUtil.grow(positionsBuf); } positionsBuf[posStart + totalPositions] = position; } if (hasOffsets) { if (offStart + totalPositions == startOffsetsBuf.length) { final int newLength = ArrayUtil.oversize(offStart + totalPositions, 4); startOffsetsBuf = ArrayUtil.growExact(startOffsetsBuf, newLength); lengthsBuf = ArrayUtil.growExact(lengthsBuf, newLength); } startOffsetsBuf[offStart + totalPositions] = startOffset; lengthsBuf[offStart + totalPositions] = length; } if (hasPayloads) { if (payStart + totalPositions == payloadLengthsBuf.length) { payloadLengthsBuf = ArrayUtil.grow(payloadLengthsBuf); } payloadLengthsBuf[payStart + totalPositions] = payloadLength; } ++totalPositions; 
} } private int numDocs; // total number of docs seen private final Deque<DocData> pendingDocs; // pending docs private DocData curDoc; // current document private FieldData curField; // current field private final BytesRef lastTerm; private int[] positionsBuf, startOffsetsBuf, lengthsBuf, payloadLengthsBuf; private final ByteBuffersDataOutput termSuffixes; // buffered term suffixes private final ByteBuffersDataOutput payloadBytes; // buffered term payloads private final BlockPackedWriter writer; private final int maxDocsPerChunk; // hard limit on number of docs per chunk private final ByteBuffersDataOutput scratchBuffer = ByteBuffersDataOutput.newResettableInstance(); /** Sole constructor. */ Lucene90CompressingTermVectorsWriter( Directory directory, SegmentInfo si, String segmentSuffix, IOContext context, String formatName, CompressionMode compressionMode, int chunkSize, int maxDocsPerChunk, int blockShift) throws IOException { assert directory != null; this.segment = si.name; this.compressionMode = compressionMode; this.compressor = compressionMode.newCompressor(); this.chunkSize = chunkSize; this.maxDocsPerChunk = maxDocsPerChunk; numDocs = 0; pendingDocs = new ArrayDeque<>(); termSuffixes = ByteBuffersDataOutput.newResettableInstance(); payloadBytes = ByteBuffersDataOutput.newResettableInstance(); lastTerm = new BytesRef(ArrayUtil.oversize(30, 1)); try { metaStream = directory.createOutput( IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_META_EXTENSION), context); CodecUtil.writeIndexHeader( metaStream, VECTORS_INDEX_CODEC_NAME + "Meta", VERSION_CURRENT, si.getId(), segmentSuffix); assert CodecUtil.indexHeaderLength(VECTORS_INDEX_CODEC_NAME + "Meta", segmentSuffix) == metaStream.getFilePointer(); vectorsStream = directory.createOutput( IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION), context); CodecUtil.writeIndexHeader( vectorsStream, formatName, VERSION_CURRENT, si.getId(), segmentSuffix); assert 
CodecUtil.indexHeaderLength(formatName, segmentSuffix) == vectorsStream.getFilePointer(); indexWriter = new FieldsIndexWriter( directory, segment, segmentSuffix, VECTORS_INDEX_EXTENSION, VECTORS_INDEX_CODEC_NAME, si.getId(), blockShift, context); metaStream.writeVInt(PackedInts.VERSION_CURRENT); metaStream.writeVInt(chunkSize); writer = new BlockPackedWriter(vectorsStream, PACKED_BLOCK_SIZE); positionsBuf = new int[1024]; startOffsetsBuf = new int[1024]; lengthsBuf = new int[1024]; payloadLengthsBuf = new int[1024]; } catch (Throwable t) { IOUtils.closeWhileSuppressingExceptions( t, metaStream, vectorsStream, indexWriter, indexWriter); throw t; } } @Override public void close() throws IOException { try { IOUtils.close(metaStream, vectorsStream, indexWriter); } finally { metaStream = null; vectorsStream = null; indexWriter = null; } } @Override public void startDocument(int numVectorFields) throws IOException { curDoc = addDocData(numVectorFields); } @Override public void finishDocument() throws IOException { // append the payload bytes of the doc after its terms payloadBytes.copyTo(termSuffixes); payloadBytes.reset(); ++numDocs; if (triggerFlush()) { flush(false); } curDoc = null; } @Override public void startField( FieldInfo info, int numTerms, boolean positions, boolean offsets, boolean payloads) throws IOException { curField = curDoc.addField(info.number, numTerms, positions, offsets, payloads); lastTerm.length = 0; } @Override public void finishField() throws IOException { curField = null; } @Override public void startTerm(BytesRef term, int freq) throws IOException { assert freq >= 1; final int prefix; if (lastTerm.length == 0) { // no previous term: no bytes to write prefix = 0; } else { prefix = StringHelper.bytesDifference(lastTerm, term); } curField.addTerm(freq, prefix, term.length - prefix); termSuffixes.writeBytes(term.bytes, term.offset + prefix, term.length - prefix); // copy last term if (lastTerm.bytes.length < term.length) { lastTerm.bytes = new 
byte[ArrayUtil.oversize(term.length, 1)]; } lastTerm.offset = 0; lastTerm.length = term.length; System.arraycopy(term.bytes, term.offset, lastTerm.bytes, 0, term.length); } @Override public void addPosition(int position, int startOffset, int endOffset, BytesRef payload) throws IOException { assert curField.flags != 0; curField.addPosition( position, startOffset, endOffset - startOffset, payload == null ? 0 : payload.length); if (curField.hasPayloads && payload != null) { payloadBytes.writeBytes(payload.bytes, payload.offset, payload.length); } } private boolean triggerFlush() { return termSuffixes.size() >= chunkSize || pendingDocs.size() >= maxDocsPerChunk; } private void flush(boolean force) throws IOException { assert force != triggerFlush(); final int chunkDocs = pendingDocs.size(); assert chunkDocs > 0 : chunkDocs; numChunks++; if (force) { numDirtyChunks++; // incomplete: we had to force this flush numDirtyDocs += pendingDocs.size(); } // write the index file indexWriter.writeIndex(chunkDocs, vectorsStream.getFilePointer()); final int docBase = numDocs - chunkDocs; vectorsStream.writeVInt(docBase); final int dirtyBit = force ? 1 : 0; vectorsStream.writeVInt((chunkDocs << 1) | dirtyBit); // total number of fields of the chunk final int totalFields = flushNumFields(chunkDocs); if (totalFields > 0) { // unique field numbers (sorted) final int[] fieldNums = flushFieldNums(); // offsets in the array of unique field numbers flushFields(totalFields, fieldNums); // flags (does the field have positions, offsets, payloads?) 
flushFlags(totalFields, fieldNums); // number of terms of each field flushNumTerms(totalFields); // prefix and suffix lengths for each field flushTermLengths(); // term freqs - 1 (because termFreq is always >=1) for each term flushTermFreqs(); // positions for all terms, when enabled flushPositions(); // offsets for all terms, when enabled flushOffsets(fieldNums); // payload lengths for all terms, when enabled flushPayloadLengths(); // compress terms and payloads and write them to the output // using ByteBuffersDataInput reduce memory copy ByteBuffersDataInput content = termSuffixes.toDataInput(); compressor.compress(content, vectorsStream); } // reset pendingDocs.clear(); curDoc = null; curField = null; termSuffixes.reset(); } private int flushNumFields(int chunkDocs) throws IOException { if (chunkDocs == 1) { final int numFields = pendingDocs.getFirst().numFields; vectorsStream.writeVInt(numFields); return numFields; } else { writer.reset(vectorsStream); int totalFields = 0; for (DocData dd : pendingDocs) { writer.add(dd.numFields); totalFields += dd.numFields; } writer.finish(); return totalFields; } } /** Returns a sorted array containing unique field numbers */ private int[] flushFieldNums() throws IOException { IntHashSet fieldNumsSet = new IntHashSet(); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { fieldNumsSet.add(fd.fieldNum); } } int[] fieldNums = fieldNumsSet.toArray(); Arrays.sort(fieldNums); final int numDistinctFields = fieldNums.length; assert numDistinctFields > 0; final int bitsRequired = PackedInts.bitsRequired(fieldNums[numDistinctFields - 1]); final int token = (Math.min(numDistinctFields - 1, 0x07) << 5) | bitsRequired; vectorsStream.writeByte((byte) token); if (numDistinctFields - 1 >= 0x07) { vectorsStream.writeVInt(numDistinctFields - 1 - 0x07); } final PackedInts.Writer writer = PackedInts.getWriterNoHeader( vectorsStream, PackedInts.Format.PACKED, numDistinctFields, bitsRequired, 1); for (Integer fieldNum : fieldNums) { 
writer.add(fieldNum); } writer.finish(); return fieldNums; } private void flushFields(int totalFields, int[] fieldNums) throws IOException { scratchBuffer.reset(); final DirectWriter writer = DirectWriter.getInstance( scratchBuffer, totalFields, DirectWriter.bitsRequired(fieldNums.length - 1)); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { final int fieldNumIndex = Arrays.binarySearch(fieldNums, fd.fieldNum); assert fieldNumIndex >= 0; writer.add(fieldNumIndex); } } writer.finish(); vectorsStream.writeVLong(scratchBuffer.size()); scratchBuffer.copyTo(vectorsStream); } private void flushFlags(int totalFields, int[] fieldNums) throws IOException { // check if fields always have the same flags boolean nonChangingFlags = true; int[] fieldFlags = new int[fieldNums.length]; Arrays.fill(fieldFlags, -1); outer: for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { final int fieldNumOff = Arrays.binarySearch(fieldNums, fd.fieldNum); assert fieldNumOff >= 0; if (fieldFlags[fieldNumOff] == -1) { fieldFlags[fieldNumOff] = fd.flags; } else if (fieldFlags[fieldNumOff] != fd.flags) { nonChangingFlags = false; break outer; } } } if (nonChangingFlags) { // write one flag per field num vectorsStream.writeVInt(0); scratchBuffer.reset(); final DirectWriter writer = DirectWriter.getInstance(scratchBuffer, fieldFlags.length, FLAGS_BITS); for (int flags : fieldFlags) { assert flags >= 0; writer.add(flags); } writer.finish(); vectorsStream.writeVInt(Math.toIntExact(scratchBuffer.size())); scratchBuffer.copyTo(vectorsStream); } else { // write one flag for every field instance vectorsStream.writeVInt(1); scratchBuffer.reset(); final DirectWriter writer = DirectWriter.getInstance(scratchBuffer, totalFields, FLAGS_BITS); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { writer.add(fd.flags); } } writer.finish(); vectorsStream.writeVInt(Math.toIntExact(scratchBuffer.size())); scratchBuffer.copyTo(vectorsStream); } } private void 
flushNumTerms(int totalFields) throws IOException { int maxNumTerms = 0; for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { maxNumTerms |= fd.numTerms; } } final int bitsRequired = DirectWriter.bitsRequired(maxNumTerms); vectorsStream.writeVInt(bitsRequired); scratchBuffer.reset(); final DirectWriter writer = DirectWriter.getInstance(scratchBuffer, totalFields, bitsRequired); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { writer.add(fd.numTerms); } } writer.finish(); vectorsStream.writeVInt(Math.toIntExact(scratchBuffer.size())); scratchBuffer.copyTo(vectorsStream); } private void flushTermLengths() throws IOException { writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { for (int i = 0; i < fd.numTerms; ++i) { writer.add(fd.prefixLengths[i]); } } } writer.finish(); writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { for (int i = 0; i < fd.numTerms; ++i) { writer.add(fd.suffixLengths[i]); } } } writer.finish(); } private void flushTermFreqs() throws IOException { writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { for (int i = 0; i < fd.numTerms; ++i) { writer.add(fd.freqs[i] - 1); } } } writer.finish(); } private void flushPositions() throws IOException { writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { if (fd.hasPositions) { int pos = 0; for (int i = 0; i < fd.numTerms; ++i) { int previousPosition = 0; for (int j = 0; j < fd.freqs[i]; ++j) { final int position = positionsBuf[fd.posStart + pos++]; writer.add(position - previousPosition); previousPosition = position; } } assert pos == fd.totalPositions; } } } writer.finish(); } private void flushOffsets(int[] fieldNums) throws IOException { boolean hasOffsets = false; long[] sumPos = new long[fieldNums.length]; long[] sumOffsets = new long[fieldNums.length]; for (DocData dd : pendingDocs) { for (FieldData fd : 
dd.fields) { hasOffsets |= fd.hasOffsets; if (fd.hasOffsets && fd.hasPositions) { final int fieldNumOff = Arrays.binarySearch(fieldNums, fd.fieldNum); int pos = 0; for (int i = 0; i < fd.numTerms; ++i) { sumPos[fieldNumOff] += positionsBuf[fd.posStart + fd.freqs[i] - 1 + pos]; sumOffsets[fieldNumOff] += startOffsetsBuf[fd.offStart + fd.freqs[i] - 1 + pos]; pos += fd.freqs[i]; } assert pos == fd.totalPositions; } } } if (!hasOffsets) { // nothing to do return; } final float[] charsPerTerm = new float[fieldNums.length]; for (int i = 0; i < fieldNums.length; ++i) { charsPerTerm[i] = (sumPos[i] <= 0 || sumOffsets[i] <= 0) ? 0 : (float) ((double) sumOffsets[i] / sumPos[i]); } // start offsets for (int i = 0; i < fieldNums.length; ++i) { vectorsStream.writeInt(Float.floatToRawIntBits(charsPerTerm[i])); } writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { if ((fd.flags & OFFSETS) != 0) { final int fieldNumOff = Arrays.binarySearch(fieldNums, fd.fieldNum); final float cpt = charsPerTerm[fieldNumOff]; int pos = 0; for (int i = 0; i < fd.numTerms; ++i) { int previousPos = 0; int previousOff = 0; for (int j = 0; j < fd.freqs[i]; ++j) { final int position = fd.hasPositions ? 
positionsBuf[fd.posStart + pos] : 0; final int startOffset = startOffsetsBuf[fd.offStart + pos]; writer.add(startOffset - previousOff - (int) (cpt * (position - previousPos))); previousPos = position; previousOff = startOffset; ++pos; } } } } } writer.finish(); // lengths writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { if ((fd.flags & OFFSETS) != 0) { int pos = 0; for (int i = 0; i < fd.numTerms; ++i) { for (int j = 0; j < fd.freqs[i]; ++j) { writer.add( lengthsBuf[fd.offStart + pos++] - fd.prefixLengths[i] - fd.suffixLengths[i]); } } assert pos == fd.totalPositions; } } } writer.finish(); } private void flushPayloadLengths() throws IOException { writer.reset(vectorsStream); for (DocData dd : pendingDocs) { for (FieldData fd : dd.fields) { if (fd.hasPayloads) { for (int i = 0; i < fd.totalPositions; ++i) { writer.add(payloadLengthsBuf[fd.payStart + i]); } } } } writer.finish(); } @Override public void finish(int numDocs) throws IOException { if (!pendingDocs.isEmpty()) { flush(true); } if (numDocs != this.numDocs) { throw new RuntimeException( "Wrote " + this.numDocs + " docs, finish called with numDocs=" + numDocs); } indexWriter.finish(numDocs, vectorsStream.getFilePointer(), metaStream); metaStream.writeVLong(numChunks); metaStream.writeVLong(numDirtyChunks); metaStream.writeVLong(numDirtyDocs); CodecUtil.writeFooter(metaStream); CodecUtil.writeFooter(vectorsStream); } @Override public void addProx(int numProx, DataInput positions, DataInput offsets) throws IOException { assert (curField.hasPositions) == (positions != null); assert (curField.hasOffsets) == (offsets != null); if (curField.hasPositions) { final int posStart = curField.posStart + curField.totalPositions; if (posStart + numProx > positionsBuf.length) { positionsBuf = ArrayUtil.grow(positionsBuf, posStart + numProx); } int position = 0; if (curField.hasPayloads) { final int payStart = curField.payStart + curField.totalPositions; if (payStart + numProx > 
payloadLengthsBuf.length) { payloadLengthsBuf = ArrayUtil.grow(payloadLengthsBuf, payStart + numProx); } for (int i = 0; i < numProx; ++i) { final int code = positions.readVInt(); if ((code & 1) != 0) { // This position has a payload final int payloadLength = positions.readVInt(); payloadLengthsBuf[payStart + i] = payloadLength; payloadBytes.copyBytes(positions, payloadLength); } else { payloadLengthsBuf[payStart + i] = 0; } position += code >>> 1; positionsBuf[posStart + i] = position; } } else { for (int i = 0; i < numProx; ++i) { position += (positions.readVInt() >>> 1); positionsBuf[posStart + i] = position; } } } if (curField.hasOffsets) { final int offStart = curField.offStart + curField.totalPositions; if (offStart + numProx > startOffsetsBuf.length) { final int newLength = ArrayUtil.oversize(offStart + numProx, 4); startOffsetsBuf = ArrayUtil.growExact(startOffsetsBuf, newLength); lengthsBuf = ArrayUtil.growExact(lengthsBuf, newLength); } int lastOffset = 0, startOffset, endOffset; for (int i = 0; i < numProx; ++i) { startOffset = lastOffset + offsets.readVInt(); endOffset = startOffset + offsets.readVInt(); lastOffset = endOffset; startOffsetsBuf[offStart + i] = startOffset; lengthsBuf[offStart + i] = endOffset - startOffset; } } curField.totalPositions += numProx; } // bulk merge is scary: its caused corruption bugs in the past. // we try to be extra safe with this impl, but add an escape hatch to // have a workaround for undiscovered bugs. 
// System property that allows disabling bulk (raw-byte) chunk copying during merges.
// Escape hatch referenced by the comment above: if an undiscovered bulk-merge bug
// corrupts indices, setting this property to "false" forces the safe
// decompress/recompress path instead.
static final String BULK_MERGE_ENABLED_SYSPROP =
    Lucene90CompressingTermVectorsWriter.class.getName() + ".enableBulkMerge";
static final boolean BULK_MERGE_ENABLED;

static {
  boolean v = true;
  try {
    v = Boolean.parseBoolean(System.getProperty(BULK_MERGE_ENABLED_SYSPROP, "true"));
  } catch (SecurityException _) {
    // Deliberately ignored: a security manager may forbid reading system properties,
    // in which case bulk merge stays at its default (enabled).
    // NOTE(review): the unnamed catch variable `_` requires JDK 21+ (unnamed variables);
    // confirm the build's minimum JDK supports it.
  }
  BULK_MERGE_ENABLED = v;
}

/**
 * Bulk-copies the term vectors for docs {@code [fromDocID, toDocID)} from {@code sub}'s
 * reader into this writer, copying whole compressed chunks as raw bytes where possible.
 *
 * <p>Docs at either end that do not align with a chunk boundary (already-loaded docs at
 * the start, leftover docs after the last full chunk) are re-added one by one through
 * {@link #addAllDocVectors}. Preconditions (asserted below): the source reader matches
 * this writer's version, chunk size and compression mode, is not {@link #tooDirty}, and
 * has no deletions.
 *
 * @param mergeState the ongoing merge's state
 * @param sub the per-reader merge sub whose docs are being copied
 * @param fromDocID first doc id (inclusive) in the source reader to copy
 * @param toDocID last doc id (exclusive) in the source reader to copy
 * @throws IOException if reading the source or writing the destination fails
 * @throws CorruptIndexException if a chunk header disagrees with the expected doc ids
 */
private void copyChunks(
    final MergeState mergeState,
    final CompressingTermVectorsSub sub,
    final int fromDocID,
    final int toDocID)
    throws IOException {
  final Lucene90CompressingTermVectorsReader reader =
      (Lucene90CompressingTermVectorsReader) mergeState.termVectorsReaders[sub.readerIndex];
  // These invariants were already checked by canPerformBulkMerge; re-assert them here
  // because raw-byte copying silently corrupts the index if any of them is violated.
  assert reader.getVersion() == VERSION_CURRENT;
  assert reader.getChunkSize() == chunkSize;
  assert reader.getCompressionMode() == compressionMode;
  assert !tooDirty(reader);
  assert mergeState.liveDocs[sub.readerIndex] == null;

  int docID = fromDocID;
  final FieldsIndex index = reader.getIndexReader();

  // copy docs that belong to the previous chunk
  // (isLoaded == true means the doc's chunk is already decompressed in the reader,
  // so it cannot be byte-copied and is re-added through the slow path instead)
  while (docID < toDocID && reader.isLoaded(docID)) {
    addAllDocVectors(reader.get(docID++), mergeState);
  }
  if (docID >= toDocID) {
    return;
  }
  // copy chunks
  long fromPointer = index.getStartPointer(docID);
  final long toPointer =
      toDocID == sub.maxDoc ? reader.getMaxPointer() : index.getStartPointer(toDocID);
  if (fromPointer < toPointer) {
    // flush any pending chunks so raw chunk bytes land on a clean boundary in our output
    if (!pendingDocs.isEmpty()) {
      flush(true);
    }
    final IndexInput rawDocs = reader.getVectorsStream();
    rawDocs.seek(fromPointer);
    do {
      // iterate over each chunk. we use the vectors index to find chunk boundaries,
      // read the docstart + doccount from the chunk header (we write a new header, since doc
      // numbers will change),
      // and just copy the bytes directly.
      // read header
      final int base = rawDocs.readVInt();
      if (base != docID) {
        throw new CorruptIndexException(
            "invalid state: base=" + base + ", docID=" + docID, rawDocs);
      }
      final int code = rawDocs.readVInt();
      // code packs (bufferedDocs << 1) | dirtyFlag; see the dirty-chunk handling below
      final int bufferedDocs = code >>> 1;

      // write a new index entry and new header for this chunk.
      indexWriter.writeIndex(bufferedDocs, vectorsStream.getFilePointer());
      vectorsStream.writeVInt(numDocs); // rebase
      vectorsStream.writeVInt(code);
      docID += bufferedDocs;
      numDocs += bufferedDocs;
      if (docID > toDocID) {
        throw new CorruptIndexException(
            "invalid state: base=" + base + ", count=" + bufferedDocs + ", toDocID=" + toDocID,
            rawDocs);
      }

      // copy bytes until the next chunk boundary (or end of chunk data).
      // using the stored fields index for this isn't the most efficient, but fast enough
      // and is a source of redundancy for detecting bad things.
      final long end;
      if (docID == sub.maxDoc) {
        end = reader.getMaxPointer();
      } else {
        end = index.getStartPointer(docID);
      }
      vectorsStream.copyBytes(rawDocs, end - rawDocs.getFilePointer());
      ++numChunks;
      // low bit of the header code marks a "dirty" (incomplete/undersized) chunk;
      // propagate the counts so tooDirty() stays accurate for the merged segment
      boolean dirtyChunk = (code & 1) != 0;
      if (dirtyChunk) {
        numDirtyChunks++;
        numDirtyDocs += bufferedDocs;
      }
      fromPointer = end;
    } while (fromPointer < toPointer);
  }

  // copy leftover docs that don't form a complete chunk
  assert reader.isLoaded(docID) == false;
  while (docID < toDocID) {
    addAllDocVectors(reader.get(docID++), mergeState);
  }
}

/**
 * Merges term vectors from all readers in {@code mergeState}, using raw chunk copying
 * ({@link #copyChunks}) for maximal runs of consecutive docs coming from a reader that
 * qualifies for bulk merge, and the generic {@link #addAllDocVectors} path otherwise.
 *
 * @return the number of documents merged
 */
@Override
public int merge(MergeState mergeState) throws IOException {
  final int numReaders = mergeState.termVectorsReaders.length;
  final MatchingReaders matchingReaders = new MatchingReaders(mergeState);
  final List<CompressingTermVectorsSub> subs = new ArrayList<>(numReaders);
  for (int i = 0; i < numReaders; i++) {
    final TermVectorsReader reader = mergeState.termVectorsReaders[i];
    if (reader != null) {
      // verify checksums up front so corruption is detected before bytes are copied
      reader.checkIntegrity();
    }
    final boolean bulkMerge = canPerformBulkMerge(mergeState, matchingReaders, i);
    subs.add(new CompressingTermVectorsSub(mergeState, bulkMerge, i));
  }
  int docCount = 0;
  final DocIDMerger<CompressingTermVectorsSub> docIDMerger =
      DocIDMerger.of(subs, mergeState.needsIndexSort);
  CompressingTermVectorsSub sub = docIDMerger.next();
  while (sub != null) {
    assert sub.mappedDocID == docCount : sub.mappedDocID + " != " + docCount;
    if (sub.canPerformBulkMerge) {
      // accumulate the longest run of consecutive docs from this sub, then copy it in bulk
      final int fromDocID = sub.docID;
      int toDocID = fromDocID;
      final CompressingTermVectorsSub current = sub;
      while ((sub = docIDMerger.next()) == current) {
        ++toDocID;
        assert sub.docID == toDocID;
      }
      ++toDocID; // exclusive bound
      copyChunks(mergeState, current, fromDocID, toDocID);
      docCount += toDocID - fromDocID;
    } else {
      // slow path: decompress and re-add this doc's vectors individually
      final TermVectorsReader reader = mergeState.termVectorsReaders[sub.readerIndex];
      final Fields vectors = reader != null ? reader.get(sub.docID) : null;
      addAllDocVectors(vectors, mergeState);
      ++docCount;
      sub = docIDMerger.next();
    }
  }
  finish(docCount);
  return docCount;
}

/**
 * Returns true if we should recompress this reader, even though we could bulk merge compressed
 * data
 *
 * <p>The last chunk written for a segment is typically incomplete, so without recompressing, in
 * some worst-case situations (e.g. frequent reopen with tiny flushes), over time the compression
 * ratio can degrade. This is a safety switch.
 */
boolean tooDirty(Lucene90CompressingTermVectorsReader candidate) {
  // A segment is considered dirty only if it has enough dirty docs to make a full block
  // AND more than 1% blocks are dirty.
  return candidate.getNumDirtyDocs() > maxDocsPerChunk
      && candidate.getNumDirtyChunks() * 100 > candidate.getNumChunks();
}

/**
 * Returns true if reader {@code readerIndex} can be merged by raw chunk copying: it must be a
 * {@link Lucene90CompressingTermVectorsReader} with the same codec configuration (compression
 * mode, chunk size, format and packed-ints versions), field numbers matching this segment
 * ({@code matchingReaders}), no deletions, not {@link #tooDirty}, and bulk merge must not be
 * disabled through {@link #BULK_MERGE_ENABLED_SYSPROP}.
 */
private boolean canPerformBulkMerge(
    MergeState mergeState, MatchingReaders matchingReaders, int readerIndex) {
  if (mergeState.termVectorsReaders[readerIndex]
      instanceof Lucene90CompressingTermVectorsReader) {
    final Lucene90CompressingTermVectorsReader reader =
        (Lucene90CompressingTermVectorsReader) mergeState.termVectorsReaders[readerIndex];
    return BULK_MERGE_ENABLED
        && matchingReaders.matchingReaders[readerIndex]
        && reader.getCompressionMode() == compressionMode
        && reader.getChunkSize() == chunkSize
        && reader.getVersion() == VERSION_CURRENT
        && reader.getPackedIntsVersion() == PackedInts.VERSION_CURRENT
        && mergeState.liveDocs[readerIndex] == null
        && !tooDirty(reader);
  }
  return false;
}

/**
 * Per-reader cursor used by {@link DocIDMerger} during {@link #merge}: iterates this reader's
 * doc ids in order and remembers whether the reader qualified for bulk merging.
 */
private static class CompressingTermVectorsSub extends DocIDMerger.Sub {
  final int maxDoc; // number of docs in this reader
  final int readerIndex; // index into mergeState.termVectorsReaders
  final boolean canPerformBulkMerge; // precomputed by canPerformBulkMerge(...)
  int docID = -1; // current doc id within this reader; -1 before the first nextDoc()

  CompressingTermVectorsSub(MergeState mergeState, boolean canPerformBulkMerge, int readerIndex) {
    super(mergeState.docMaps[readerIndex]);
    this.maxDoc = mergeState.maxDocs[readerIndex];
    this.readerIndex = readerIndex;
    this.canPerformBulkMerge = canPerformBulkMerge;
  }

  @Override
  public int nextDoc() {
    docID++;
    if (docID == maxDoc) {
      return NO_MORE_DOCS;
    } else {
      return docID;
    }
  }
}

/**
 * Rough accounting of heap used by this writer's buffers.
 *
 * <p>NOTE(review): the int-array terms sum {@code length} (element counts) rather than byte
 * sizes — presumably an accepted approximation; confirm against the other codec writers before
 * "fixing".
 */
@Override
public long ramBytesUsed() {
  return positionsBuf.length
      + startOffsetsBuf.length
      + lengthsBuf.length
      + payloadLengthsBuf.length
      + termSuffixes.ramBytesUsed()
      + payloadBytes.ramBytesUsed()
      + lastTerm.bytes.length
      + scratchBuffer.ramBytesUsed();
}

/** Child {@link Accountable}s for memory reporting: the term-suffix and payload buffers. */
@Override
public Collection<Accountable> getChildResources() {
  return List.of(termSuffixes, payloadBytes);
}
}
googleads/google-ads-java
35,485
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/MediaUploadErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/errors/media_upload_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.errors; /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.MediaUploadErrorEnum} */ public final class MediaUploadErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.MediaUploadErrorEnum) MediaUploadErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use MediaUploadErrorEnum.newBuilder() to construct. private MediaUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MediaUploadErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new MediaUploadErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v19_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v19_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.Builder.class); } /** * <pre> * Enum describing possible media uploading errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v19.errors.MediaUploadErrorEnum.MediaUploadError} */ public enum MediaUploadError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ FILE_TOO_BIG(2), /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ UNPARSEABLE_IMAGE(3), /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ ANIMATED_IMAGE_NOT_ALLOWED(4), /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ FORMAT_NOT_ALLOWED(5), /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ EXTERNAL_URL_NOT_ALLOWED(6), /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ INVALID_URL_REFERENCE(7), /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY(8), /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ ANIMATED_VISUAL_EFFECT(9), /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ ANIMATION_TOO_LONG(10), /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ ASPECT_RATIO_NOT_ALLOWED(11), /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE(12), /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ CMYK_JPEG_NOT_ALLOWED(13), /** * <pre> * Flash movies are not allowed. 
* </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ FLASH_NOT_ALLOWED(14), /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ FRAME_RATE_TOO_HIGH(15), /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED(16), /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ IMAGE_CONSTRAINTS_VIOLATED(17), /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ INVALID_MEDIA_BUNDLE(18), /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ INVALID_MEDIA_BUNDLE_ENTRY(19), /** * <pre> * The asset has an invalid mime type. * </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ INVALID_MIME_TYPE(20), /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ INVALID_PATH(21), /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ LAYOUT_PROBLEM(22), /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ MALFORMED_URL(23), /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ MEDIA_BUNDLE_NOT_ALLOWED(24), /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) 
* </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE(25), /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS(26), /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ TOO_MANY_FILES_IN_MEDIA_BUNDLE(27), /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT(28), /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ UNSUPPORTED_HTML5_FEATURE(29), /** * <pre> * URL in HTML5 entry is not SSL compliant. * </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT(30), /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ VIDEO_FILE_NAME_TOO_LONG(31), /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ VIDEO_MULTIPLE_FILES_WITH_SAME_NAME(32), /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE(33), /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API(34), /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ DIMENSIONS_NOT_ALLOWED(35), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ public static final int FILE_TOO_BIG_VALUE = 2; /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ public static final int UNPARSEABLE_IMAGE_VALUE = 3; /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ public static final int ANIMATED_IMAGE_NOT_ALLOWED_VALUE = 4; /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 5; /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ public static final int EXTERNAL_URL_NOT_ALLOWED_VALUE = 6; /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ public static final int INVALID_URL_REFERENCE_VALUE = 7; /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ public static final int MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY_VALUE = 8; /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 9; /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 10; /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. 
* </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 11; /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ public static final int AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 12; /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 13; /** * <pre> * Flash movies are not allowed. * </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 14; /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ public static final int FRAME_RATE_TOO_HIGH_VALUE = 15; /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ public static final int GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED_VALUE = 16; /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 17; /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ public static final int INVALID_MEDIA_BUNDLE_VALUE = 18; /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ public static final int INVALID_MEDIA_BUNDLE_ENTRY_VALUE = 19; /** * <pre> * The asset has an invalid mime type. 
* </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ public static final int INVALID_MIME_TYPE_VALUE = 20; /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ public static final int INVALID_PATH_VALUE = 21; /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 22; /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ public static final int MALFORMED_URL_VALUE = 23; /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ public static final int MEDIA_BUNDLE_NOT_ALLOWED_VALUE = 24; /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) * </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ public static final int MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE_VALUE = 25; /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ public static final int MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS_VALUE = 26; /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ public static final int TOO_MANY_FILES_IN_MEDIA_BUNDLE_VALUE = 27; /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ public static final int UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT_VALUE = 28; /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ public static final int UNSUPPORTED_HTML5_FEATURE_VALUE = 29; /** * <pre> * URL in HTML5 entry is not SSL compliant. 
* </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ public static final int URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT_VALUE = 30; /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ public static final int VIDEO_FILE_NAME_TOO_LONG_VALUE = 31; /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ public static final int VIDEO_MULTIPLE_FILES_WITH_SAME_NAME_VALUE = 32; /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ public static final int VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 33; /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ public static final int CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API_VALUE = 34; /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ public static final int DIMENSIONS_NOT_ALLOWED_VALUE = 35; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaUploadError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static MediaUploadError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return FILE_TOO_BIG; case 3: return UNPARSEABLE_IMAGE; case 4: return ANIMATED_IMAGE_NOT_ALLOWED; case 5: return FORMAT_NOT_ALLOWED; case 6: return EXTERNAL_URL_NOT_ALLOWED; case 7: return INVALID_URL_REFERENCE; case 8: return MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY; case 9: return ANIMATED_VISUAL_EFFECT; case 10: return ANIMATION_TOO_LONG; case 11: return ASPECT_RATIO_NOT_ALLOWED; case 12: return AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 13: return CMYK_JPEG_NOT_ALLOWED; case 14: return FLASH_NOT_ALLOWED; case 15: return FRAME_RATE_TOO_HIGH; case 16: return GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED; case 17: return IMAGE_CONSTRAINTS_VIOLATED; case 18: return INVALID_MEDIA_BUNDLE; case 19: return INVALID_MEDIA_BUNDLE_ENTRY; case 20: return INVALID_MIME_TYPE; case 21: return INVALID_PATH; case 22: return LAYOUT_PROBLEM; case 23: return MALFORMED_URL; case 24: return MEDIA_BUNDLE_NOT_ALLOWED; case 25: return MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE; case 26: return MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS; case 27: return TOO_MANY_FILES_IN_MEDIA_BUNDLE; case 28: return UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT; case 29: return UNSUPPORTED_HTML5_FEATURE; case 30: return URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT; case 31: return VIDEO_FILE_NAME_TOO_LONG; case 32: return VIDEO_MULTIPLE_FILES_WITH_SAME_NAME; case 33: return VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 34: return CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API; case 35: return DIMENSIONS_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<MediaUploadError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< MediaUploadError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<MediaUploadError>() { public MediaUploadError findValueByNumber(int number) { return 
MediaUploadError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final MediaUploadError[] VALUES = values(); public static MediaUploadError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private MediaUploadError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.errors.MediaUploadErrorEnum.MediaUploadError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.errors.MediaUploadErrorEnum)) { 
return super.equals(obj); } com.google.ads.googleads.v19.errors.MediaUploadErrorEnum other = (com.google.ads.googleads.v19.errors.MediaUploadErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.errors.MediaUploadErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.MediaUploadErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.MediaUploadErrorEnum) com.google.ads.googleads.v19.errors.MediaUploadErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v19_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v19_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v19_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override public 
com.google.ads.googleads.v19.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.errors.MediaUploadErrorEnum build() { com.google.ads.googleads.v19.errors.MediaUploadErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.errors.MediaUploadErrorEnum buildPartial() { com.google.ads.googleads.v19.errors.MediaUploadErrorEnum result = new com.google.ads.googleads.v19.errors.MediaUploadErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.errors.MediaUploadErrorEnum) { return mergeFrom((com.google.ads.googleads.v19.errors.MediaUploadErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.errors.MediaUploadErrorEnum other) { if (other 
== com.google.ads.googleads.v19.errors.MediaUploadErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.MediaUploadErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.MediaUploadErrorEnum) private static final com.google.ads.googleads.v19.errors.MediaUploadErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.MediaUploadErrorEnum(); } public static com.google.ads.googleads.v19.errors.MediaUploadErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MediaUploadErrorEnum> PARSER = new com.google.protobuf.AbstractParser<MediaUploadErrorEnum>() { @java.lang.Override public MediaUploadErrorEnum parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MediaUploadErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MediaUploadErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,485
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/errors/MediaUploadErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/errors/media_upload_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.errors; /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.MediaUploadErrorEnum} */ public final class MediaUploadErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.MediaUploadErrorEnum) MediaUploadErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use MediaUploadErrorEnum.newBuilder() to construct. private MediaUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MediaUploadErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new MediaUploadErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v20_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v20_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.Builder.class); } /** * <pre> * Enum describing possible media uploading errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v20.errors.MediaUploadErrorEnum.MediaUploadError} */ public enum MediaUploadError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ FILE_TOO_BIG(2), /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ UNPARSEABLE_IMAGE(3), /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ ANIMATED_IMAGE_NOT_ALLOWED(4), /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ FORMAT_NOT_ALLOWED(5), /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ EXTERNAL_URL_NOT_ALLOWED(6), /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ INVALID_URL_REFERENCE(7), /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY(8), /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ ANIMATED_VISUAL_EFFECT(9), /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ ANIMATION_TOO_LONG(10), /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ ASPECT_RATIO_NOT_ALLOWED(11), /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE(12), /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ CMYK_JPEG_NOT_ALLOWED(13), /** * <pre> * Flash movies are not allowed. 
* </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ FLASH_NOT_ALLOWED(14), /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ FRAME_RATE_TOO_HIGH(15), /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED(16), /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ IMAGE_CONSTRAINTS_VIOLATED(17), /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ INVALID_MEDIA_BUNDLE(18), /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ INVALID_MEDIA_BUNDLE_ENTRY(19), /** * <pre> * The asset has an invalid mime type. * </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ INVALID_MIME_TYPE(20), /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ INVALID_PATH(21), /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ LAYOUT_PROBLEM(22), /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ MALFORMED_URL(23), /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ MEDIA_BUNDLE_NOT_ALLOWED(24), /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) 
* </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE(25), /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS(26), /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ TOO_MANY_FILES_IN_MEDIA_BUNDLE(27), /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT(28), /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ UNSUPPORTED_HTML5_FEATURE(29), /** * <pre> * URL in HTML5 entry is not SSL compliant. * </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT(30), /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ VIDEO_FILE_NAME_TOO_LONG(31), /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ VIDEO_MULTIPLE_FILES_WITH_SAME_NAME(32), /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE(33), /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API(34), /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ DIMENSIONS_NOT_ALLOWED(35), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ public static final int FILE_TOO_BIG_VALUE = 2; /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ public static final int UNPARSEABLE_IMAGE_VALUE = 3; /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ public static final int ANIMATED_IMAGE_NOT_ALLOWED_VALUE = 4; /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 5; /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ public static final int EXTERNAL_URL_NOT_ALLOWED_VALUE = 6; /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ public static final int INVALID_URL_REFERENCE_VALUE = 7; /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ public static final int MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY_VALUE = 8; /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 9; /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 10; /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. 
* </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 11; /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ public static final int AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 12; /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 13; /** * <pre> * Flash movies are not allowed. * </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 14; /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ public static final int FRAME_RATE_TOO_HIGH_VALUE = 15; /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ public static final int GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED_VALUE = 16; /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 17; /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ public static final int INVALID_MEDIA_BUNDLE_VALUE = 18; /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ public static final int INVALID_MEDIA_BUNDLE_ENTRY_VALUE = 19; /** * <pre> * The asset has an invalid mime type. 
* </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ public static final int INVALID_MIME_TYPE_VALUE = 20; /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ public static final int INVALID_PATH_VALUE = 21; /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 22; /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ public static final int MALFORMED_URL_VALUE = 23; /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ public static final int MEDIA_BUNDLE_NOT_ALLOWED_VALUE = 24; /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) * </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ public static final int MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE_VALUE = 25; /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ public static final int MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS_VALUE = 26; /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ public static final int TOO_MANY_FILES_IN_MEDIA_BUNDLE_VALUE = 27; /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ public static final int UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT_VALUE = 28; /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ public static final int UNSUPPORTED_HTML5_FEATURE_VALUE = 29; /** * <pre> * URL in HTML5 entry is not SSL compliant. 
* </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ public static final int URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT_VALUE = 30; /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ public static final int VIDEO_FILE_NAME_TOO_LONG_VALUE = 31; /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ public static final int VIDEO_MULTIPLE_FILES_WITH_SAME_NAME_VALUE = 32; /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ public static final int VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 33; /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ public static final int CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API_VALUE = 34; /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ public static final int DIMENSIONS_NOT_ALLOWED_VALUE = 35; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaUploadError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static MediaUploadError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return FILE_TOO_BIG; case 3: return UNPARSEABLE_IMAGE; case 4: return ANIMATED_IMAGE_NOT_ALLOWED; case 5: return FORMAT_NOT_ALLOWED; case 6: return EXTERNAL_URL_NOT_ALLOWED; case 7: return INVALID_URL_REFERENCE; case 8: return MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY; case 9: return ANIMATED_VISUAL_EFFECT; case 10: return ANIMATION_TOO_LONG; case 11: return ASPECT_RATIO_NOT_ALLOWED; case 12: return AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 13: return CMYK_JPEG_NOT_ALLOWED; case 14: return FLASH_NOT_ALLOWED; case 15: return FRAME_RATE_TOO_HIGH; case 16: return GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED; case 17: return IMAGE_CONSTRAINTS_VIOLATED; case 18: return INVALID_MEDIA_BUNDLE; case 19: return INVALID_MEDIA_BUNDLE_ENTRY; case 20: return INVALID_MIME_TYPE; case 21: return INVALID_PATH; case 22: return LAYOUT_PROBLEM; case 23: return MALFORMED_URL; case 24: return MEDIA_BUNDLE_NOT_ALLOWED; case 25: return MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE; case 26: return MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS; case 27: return TOO_MANY_FILES_IN_MEDIA_BUNDLE; case 28: return UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT; case 29: return UNSUPPORTED_HTML5_FEATURE; case 30: return URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT; case 31: return VIDEO_FILE_NAME_TOO_LONG; case 32: return VIDEO_MULTIPLE_FILES_WITH_SAME_NAME; case 33: return VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 34: return CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API; case 35: return DIMENSIONS_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<MediaUploadError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< MediaUploadError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<MediaUploadError>() { public MediaUploadError findValueByNumber(int number) { return 
MediaUploadError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final MediaUploadError[] VALUES = values(); public static MediaUploadError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private MediaUploadError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.errors.MediaUploadErrorEnum.MediaUploadError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.errors.MediaUploadErrorEnum)) { 
return super.equals(obj); } com.google.ads.googleads.v20.errors.MediaUploadErrorEnum other = (com.google.ads.googleads.v20.errors.MediaUploadErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.errors.MediaUploadErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.MediaUploadErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.MediaUploadErrorEnum) com.google.ads.googleads.v20.errors.MediaUploadErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v20_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v20_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v20_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override public 
com.google.ads.googleads.v20.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.errors.MediaUploadErrorEnum build() { com.google.ads.googleads.v20.errors.MediaUploadErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.errors.MediaUploadErrorEnum buildPartial() { com.google.ads.googleads.v20.errors.MediaUploadErrorEnum result = new com.google.ads.googleads.v20.errors.MediaUploadErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.errors.MediaUploadErrorEnum) { return mergeFrom((com.google.ads.googleads.v20.errors.MediaUploadErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.errors.MediaUploadErrorEnum other) { if (other 
== com.google.ads.googleads.v20.errors.MediaUploadErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.MediaUploadErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.MediaUploadErrorEnum) private static final com.google.ads.googleads.v20.errors.MediaUploadErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.MediaUploadErrorEnum(); } public static com.google.ads.googleads.v20.errors.MediaUploadErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MediaUploadErrorEnum> PARSER = new com.google.protobuf.AbstractParser<MediaUploadErrorEnum>() { @java.lang.Override public MediaUploadErrorEnum parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MediaUploadErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MediaUploadErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,485
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/MediaUploadErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/errors/media_upload_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.errors; /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.MediaUploadErrorEnum} */ public final class MediaUploadErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.MediaUploadErrorEnum) MediaUploadErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use MediaUploadErrorEnum.newBuilder() to construct. private MediaUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MediaUploadErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new MediaUploadErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v21_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v21_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.Builder.class); } /** * <pre> * Enum describing possible media uploading errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v21.errors.MediaUploadErrorEnum.MediaUploadError} */ public enum MediaUploadError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ FILE_TOO_BIG(2), /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ UNPARSEABLE_IMAGE(3), /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ ANIMATED_IMAGE_NOT_ALLOWED(4), /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ FORMAT_NOT_ALLOWED(5), /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ EXTERNAL_URL_NOT_ALLOWED(6), /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ INVALID_URL_REFERENCE(7), /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY(8), /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ ANIMATED_VISUAL_EFFECT(9), /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ ANIMATION_TOO_LONG(10), /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. * </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ ASPECT_RATIO_NOT_ALLOWED(11), /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE(12), /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ CMYK_JPEG_NOT_ALLOWED(13), /** * <pre> * Flash movies are not allowed. 
* </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ FLASH_NOT_ALLOWED(14), /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ FRAME_RATE_TOO_HIGH(15), /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED(16), /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ IMAGE_CONSTRAINTS_VIOLATED(17), /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ INVALID_MEDIA_BUNDLE(18), /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ INVALID_MEDIA_BUNDLE_ENTRY(19), /** * <pre> * The asset has an invalid mime type. * </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ INVALID_MIME_TYPE(20), /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ INVALID_PATH(21), /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ LAYOUT_PROBLEM(22), /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ MALFORMED_URL(23), /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ MEDIA_BUNDLE_NOT_ALLOWED(24), /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) 
* </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE(25), /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS(26), /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ TOO_MANY_FILES_IN_MEDIA_BUNDLE(27), /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT(28), /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ UNSUPPORTED_HTML5_FEATURE(29), /** * <pre> * URL in HTML5 entry is not SSL compliant. * </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT(30), /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ VIDEO_FILE_NAME_TOO_LONG(31), /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ VIDEO_MULTIPLE_FILES_WITH_SAME_NAME(32), /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE(33), /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API(34), /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ DIMENSIONS_NOT_ALLOWED(35), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The uploaded file is too big. * </pre> * * <code>FILE_TOO_BIG = 2;</code> */ public static final int FILE_TOO_BIG_VALUE = 2; /** * <pre> * Image data is unparseable. * </pre> * * <code>UNPARSEABLE_IMAGE = 3;</code> */ public static final int UNPARSEABLE_IMAGE_VALUE = 3; /** * <pre> * Animated images are not allowed. * </pre> * * <code>ANIMATED_IMAGE_NOT_ALLOWED = 4;</code> */ public static final int ANIMATED_IMAGE_NOT_ALLOWED_VALUE = 4; /** * <pre> * The image or media bundle format is not allowed. * </pre> * * <code>FORMAT_NOT_ALLOWED = 5;</code> */ public static final int FORMAT_NOT_ALLOWED_VALUE = 5; /** * <pre> * Cannot reference URL external to the media bundle. * </pre> * * <code>EXTERNAL_URL_NOT_ALLOWED = 6;</code> */ public static final int EXTERNAL_URL_NOT_ALLOWED_VALUE = 6; /** * <pre> * HTML5 ad is trying to reference an asset not in .ZIP file. * </pre> * * <code>INVALID_URL_REFERENCE = 7;</code> */ public static final int INVALID_URL_REFERENCE_VALUE = 7; /** * <pre> * The media bundle contains no primary entry. * </pre> * * <code>MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY = 8;</code> */ public static final int MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY_VALUE = 8; /** * <pre> * Animation has disallowed visual effects. * </pre> * * <code>ANIMATED_VISUAL_EFFECT = 9;</code> */ public static final int ANIMATED_VISUAL_EFFECT_VALUE = 9; /** * <pre> * Animation longer than the allowed 30 second limit. * </pre> * * <code>ANIMATION_TOO_LONG = 10;</code> */ public static final int ANIMATION_TOO_LONG_VALUE = 10; /** * <pre> * The aspect ratio of the image does not match the expected aspect ratios * provided in the asset spec. 
* </pre> * * <code>ASPECT_RATIO_NOT_ALLOWED = 11;</code> */ public static final int ASPECT_RATIO_NOT_ALLOWED_VALUE = 11; /** * <pre> * Audio files are not allowed in bundle. * </pre> * * <code>AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 12;</code> */ public static final int AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 12; /** * <pre> * CMYK jpegs are not supported. * </pre> * * <code>CMYK_JPEG_NOT_ALLOWED = 13;</code> */ public static final int CMYK_JPEG_NOT_ALLOWED_VALUE = 13; /** * <pre> * Flash movies are not allowed. * </pre> * * <code>FLASH_NOT_ALLOWED = 14;</code> */ public static final int FLASH_NOT_ALLOWED_VALUE = 14; /** * <pre> * The frame rate of the video is higher than the allowed 5fps. * </pre> * * <code>FRAME_RATE_TOO_HIGH = 15;</code> */ public static final int FRAME_RATE_TOO_HIGH_VALUE = 15; /** * <pre> * ZIP file from Google Web Designer is not published. * </pre> * * <code>GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED = 16;</code> */ public static final int GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED_VALUE = 16; /** * <pre> * Image constraints are violated, but more details (like * DIMENSIONS_NOT_ALLOWED or ASPECT_RATIO_NOT_ALLOWED) can not be provided. * This happens when asset spec contains more than one constraint and * criteria of different constraints are violated. * </pre> * * <code>IMAGE_CONSTRAINTS_VIOLATED = 17;</code> */ public static final int IMAGE_CONSTRAINTS_VIOLATED_VALUE = 17; /** * <pre> * Media bundle data is unrecognizable. * </pre> * * <code>INVALID_MEDIA_BUNDLE = 18;</code> */ public static final int INVALID_MEDIA_BUNDLE_VALUE = 18; /** * <pre> * There was a problem with one or more of the media bundle entries. * </pre> * * <code>INVALID_MEDIA_BUNDLE_ENTRY = 19;</code> */ public static final int INVALID_MEDIA_BUNDLE_ENTRY_VALUE = 19; /** * <pre> * The asset has an invalid mime type. 
* </pre> * * <code>INVALID_MIME_TYPE = 20;</code> */ public static final int INVALID_MIME_TYPE_VALUE = 20; /** * <pre> * The media bundle contains an invalid asset path. * </pre> * * <code>INVALID_PATH = 21;</code> */ public static final int INVALID_PATH_VALUE = 21; /** * <pre> * Image has layout problem. * </pre> * * <code>LAYOUT_PROBLEM = 22;</code> */ public static final int LAYOUT_PROBLEM_VALUE = 22; /** * <pre> * An asset had a URL reference that is malformed per RFC 1738 convention. * </pre> * * <code>MALFORMED_URL = 23;</code> */ public static final int MALFORMED_URL_VALUE = 23; /** * <pre> * The uploaded media bundle format is not allowed. * </pre> * * <code>MEDIA_BUNDLE_NOT_ALLOWED = 24;</code> */ public static final int MEDIA_BUNDLE_NOT_ALLOWED_VALUE = 24; /** * <pre> * The media bundle is not compatible with the asset spec product type. * (For example, Gmail, dynamic remarketing, etc.) * </pre> * * <code>MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE = 25;</code> */ public static final int MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE_VALUE = 25; /** * <pre> * A bundle being uploaded that is incompatible with multiple assets for * different reasons. * </pre> * * <code>MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS = 26;</code> */ public static final int MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS_VALUE = 26; /** * <pre> * The media bundle contains too many files. * </pre> * * <code>TOO_MANY_FILES_IN_MEDIA_BUNDLE = 27;</code> */ public static final int TOO_MANY_FILES_IN_MEDIA_BUNDLE_VALUE = 27; /** * <pre> * Google Web Designer not created for "Google Ads" environment. * </pre> * * <code>UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT = 28;</code> */ public static final int UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT_VALUE = 28; /** * <pre> * Unsupported HTML5 feature in HTML5 asset. * </pre> * * <code>UNSUPPORTED_HTML5_FEATURE = 29;</code> */ public static final int UNSUPPORTED_HTML5_FEATURE_VALUE = 29; /** * <pre> * URL in HTML5 entry is not SSL compliant. 
* </pre> * * <code>URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT = 30;</code> */ public static final int URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT_VALUE = 30; /** * <pre> * Video file name is longer than the 50 allowed characters. * </pre> * * <code>VIDEO_FILE_NAME_TOO_LONG = 31;</code> */ public static final int VIDEO_FILE_NAME_TOO_LONG_VALUE = 31; /** * <pre> * Multiple videos with same name in a bundle. * </pre> * * <code>VIDEO_MULTIPLE_FILES_WITH_SAME_NAME = 32;</code> */ public static final int VIDEO_MULTIPLE_FILES_WITH_SAME_NAME_VALUE = 32; /** * <pre> * Videos are not allowed in media bundle. * </pre> * * <code>VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE = 33;</code> */ public static final int VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE_VALUE = 33; /** * <pre> * This type of media cannot be uploaded through the Google Ads API. * </pre> * * <code>CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API = 34;</code> */ public static final int CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API_VALUE = 34; /** * <pre> * The dimensions of the image are not allowed. * </pre> * * <code>DIMENSIONS_NOT_ALLOWED = 35;</code> */ public static final int DIMENSIONS_NOT_ALLOWED_VALUE = 35; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaUploadError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static MediaUploadError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return FILE_TOO_BIG; case 3: return UNPARSEABLE_IMAGE; case 4: return ANIMATED_IMAGE_NOT_ALLOWED; case 5: return FORMAT_NOT_ALLOWED; case 6: return EXTERNAL_URL_NOT_ALLOWED; case 7: return INVALID_URL_REFERENCE; case 8: return MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY; case 9: return ANIMATED_VISUAL_EFFECT; case 10: return ANIMATION_TOO_LONG; case 11: return ASPECT_RATIO_NOT_ALLOWED; case 12: return AUDIO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 13: return CMYK_JPEG_NOT_ALLOWED; case 14: return FLASH_NOT_ALLOWED; case 15: return FRAME_RATE_TOO_HIGH; case 16: return GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED; case 17: return IMAGE_CONSTRAINTS_VIOLATED; case 18: return INVALID_MEDIA_BUNDLE; case 19: return INVALID_MEDIA_BUNDLE_ENTRY; case 20: return INVALID_MIME_TYPE; case 21: return INVALID_PATH; case 22: return LAYOUT_PROBLEM; case 23: return MALFORMED_URL; case 24: return MEDIA_BUNDLE_NOT_ALLOWED; case 25: return MEDIA_BUNDLE_NOT_COMPATIBLE_TO_PRODUCT_TYPE; case 26: return MEDIA_BUNDLE_REJECTED_BY_MULTIPLE_ASSET_SPECS; case 27: return TOO_MANY_FILES_IN_MEDIA_BUNDLE; case 28: return UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT; case 29: return UNSUPPORTED_HTML5_FEATURE; case 30: return URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT; case 31: return VIDEO_FILE_NAME_TOO_LONG; case 32: return VIDEO_MULTIPLE_FILES_WITH_SAME_NAME; case 33: return VIDEO_NOT_ALLOWED_IN_MEDIA_BUNDLE; case 34: return CANNOT_UPLOAD_MEDIA_TYPE_THROUGH_API; case 35: return DIMENSIONS_NOT_ALLOWED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<MediaUploadError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< MediaUploadError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<MediaUploadError>() { public MediaUploadError findValueByNumber(int number) { return 
MediaUploadError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final MediaUploadError[] VALUES = values(); public static MediaUploadError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private MediaUploadError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.MediaUploadErrorEnum.MediaUploadError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.errors.MediaUploadErrorEnum)) { 
return super.equals(obj); } com.google.ads.googleads.v21.errors.MediaUploadErrorEnum other = (com.google.ads.googleads.v21.errors.MediaUploadErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.errors.MediaUploadErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible media uploading errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.MediaUploadErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.MediaUploadErrorEnum) com.google.ads.googleads.v21.errors.MediaUploadErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v21_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v21_errors_MediaUploadErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.class, com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.errors.MediaUploadErrorProto.internal_static_google_ads_googleads_v21_errors_MediaUploadErrorEnum_descriptor; } @java.lang.Override public 
com.google.ads.googleads.v21.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.errors.MediaUploadErrorEnum build() { com.google.ads.googleads.v21.errors.MediaUploadErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.errors.MediaUploadErrorEnum buildPartial() { com.google.ads.googleads.v21.errors.MediaUploadErrorEnum result = new com.google.ads.googleads.v21.errors.MediaUploadErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.errors.MediaUploadErrorEnum) { return mergeFrom((com.google.ads.googleads.v21.errors.MediaUploadErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.errors.MediaUploadErrorEnum other) { if (other 
== com.google.ads.googleads.v21.errors.MediaUploadErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.MediaUploadErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.MediaUploadErrorEnum) private static final com.google.ads.googleads.v21.errors.MediaUploadErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.MediaUploadErrorEnum(); } public static com.google.ads.googleads.v21.errors.MediaUploadErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MediaUploadErrorEnum> PARSER = new com.google.protobuf.AbstractParser<MediaUploadErrorEnum>() { @java.lang.Override public MediaUploadErrorEnum parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MediaUploadErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MediaUploadErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.errors.MediaUploadErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ozone
35,831
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestBlockOutputStreamWithFailures.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.client.rpc; import static org.apache.hadoop.hdds.scm.client.HddsClientUtils.checkForException; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.BLOCK_SIZE; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.BUCKET; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.CHUNK_SIZE; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.FLUSH_SIZE; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.MAX_FLUSH_SIZE; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.VOLUME; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.createCluster; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.createKey; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.getKeyName; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.newClient; import static org.apache.hadoop.ozone.client.rpc.TestBlockOutputStream.newClientConfig; import static org.apache.hadoop.ozone.container.TestHelper.validateData; import static org.assertj.core.api.Assertions.assertThat; import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.stream.Stream; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.RandomUtils; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.scm.OzoneClientConfig; import org.apache.hadoop.hdds.scm.XceiverClientRatis; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerNotOpenException; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.storage.RatisBlockOutputStream; import org.apache.hadoop.ozone.HddsDatanodeService; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.client.io.KeyOutputStream; import org.apache.hadoop.ozone.client.io.OzoneOutputStream; import org.apache.hadoop.ozone.container.TestHelper; import org.apache.ozone.test.tag.Flaky; import org.apache.ratis.protocol.exceptions.GroupMismatchException; import org.apache.ratis.protocol.exceptions.RaftRetryFailureException; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; /** * Tests failure detection and handling in BlockOutputStream Class. 
*/ @TestInstance(TestInstance.Lifecycle.PER_CLASS) @Flaky("HDDS-11849") class TestBlockOutputStreamWithFailures { private MiniOzoneCluster cluster; @BeforeAll void init() throws Exception { cluster = createCluster(25); } @AfterAll void shutdown() { if (cluster != null) { cluster.shutdown(); } } private static Stream<Arguments> clientParameters() { return Stream.of( Arguments.of(true, true), Arguments.of(true, false), Arguments.of(false, true), Arguments.of(false, false) ); } @ParameterizedTest @MethodSource("clientParameters") void testContainerClose(boolean flushDelay, boolean enablePiggybacking) throws Exception { OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking); try (OzoneClient client = newClient(cluster.getConf(), config)) { testWatchForCommitWithCloseContainerException(client); testWatchForCommitWithSingleNodeRatis(client); testWriteMoreThanMaxFlushSize(client); testExceptionDuringClose(client); } } private void testWatchForCommitWithCloseContainerException(OzoneClient client) throws Exception { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have 4 buffers allocated worth of chunk size assertEquals(4, blockOutputStream.getBufferPool().getSize()); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // 
since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // ack'd by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast one entry from the map where each // entry corresponds to flushSize worth of data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(1); // This will flush the data and update the flush length and the map. key.flush(); // flush is a sync call, all pending operations will complete // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertEquals(4, blockOutputStream.getBufferPool().getSize()); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(3, raftClient.getCommitInfoMap().size()); // Close the containers on the Datanode and write more data TestHelper.waitForContainerClose(key, cluster); key.write(data1); // As a part of handling the exception, 4 failed writeChunks will be // rewritten plus one partial chunk plus two putBlocks for flushSize // and one flush for partial chunk key.flush(); assertEquals(2, keyOutputStream.getStreamEntries().size()); assertInstanceOf(ContainerNotOpenException.class, checkForException(blockOutputStream.getIoException())); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // commitInfoMap will remain intact as there is no server failure assertEquals(3, raftClient.getCommitInfoMap().size()); // now close the stream, 
It will update ack length after watchForCommit key.close(); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getStreamEntries().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } @ParameterizedTest @MethodSource("clientParameters") void testWatchForCommitDatanodeFailure(boolean flushDelay, boolean enablePiggybacking) throws Exception { OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking); try (OzoneClient client = newClient(cluster.getConf(), config)) { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); // since its hitting the full bufferCondition, it will call watchForCommit // and completes at least putBlock for first flushSize worth of data KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have 3 buffers allocated worth of chunk size assertEquals(4, blockOutputStream.getBufferPool().getSize()); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); // since data written is still less than flushLength, flushLength will // still be 0. 
assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // ack'd by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast flushSize worth of data buffer // where each entry corresponds to flushSize worth of data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); // This will flush the data and update the flush length and the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertEquals(4, blockOutputStream.getBufferPool().getSize()); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(3, raftClient.getCommitInfoMap().size()); Pipeline pipeline = raftClient.getPipeline(); stopAndRemove(pipeline.getNodes().get(0)); // again write data with more than max buffer limit. This will call // watchForCommit again. 
Since the commit will happen 2 way, the // commitInfoMap will get updated for servers which are alive key.write(data1); key.flush(); assertEquals(2, keyOutputStream.getStreamEntries().size()); // now close the stream, It will update ack length after watchForCommit key.close(); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getStreamEntries().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } } @ParameterizedTest @MethodSource("clientParameters") void test2DatanodesFailure(boolean flushDelay, boolean enablePiggybacking) throws Exception { OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking); try (OzoneClient client = newClient(cluster.getConf(), config)) { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); // since its hitting the full bufferCondition, it will call watchForCommit // and completes atleast putBlock for first flushSize worth of data KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have 3 buffers allocated worth of chunk size assertEquals(4, blockOutputStream.getBufferPool().getSize()); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, 
blockOutputStream.getWrittenDataLength()); assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // acked by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast one entry from the map where each // entry corresponds to flushSize worth of data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(1); // This will flush the data and update the flush length and the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertEquals(4, blockOutputStream.getBufferPool().getSize()); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(3, raftClient.getCommitInfoMap().size()); Pipeline pipeline = raftClient.getPipeline(); stopAndRemove(pipeline.getNodes().get(0)); stopAndRemove(pipeline.getNodes().get(1)); // again write data with more than max buffer limit. This will call // watchForCommit again. 
Since the commit will happen 2 way, the // commitInfoMap will get updated for servers which are alive // 4 writeChunks = maxFlushSize + 2 putBlocks will be discarded here // once exception is hit key.write(data1); // As a part of handling the exception, 4 failed writeChunks will be // rewritten plus one partial chunk plus two putBlocks for flushSize // and one flush for partial chunk key.flush(); Throwable ioException = checkForException( blockOutputStream.getIoException()); // Since, 2 datanodes went down, // a) if the pipeline gets destroyed quickly it will hit // GroupMismatchException. // b) will hit close container exception if the container is closed // but pipeline is still not destroyed. // c) will fail with RaftRetryFailureException if the leader election // did not finish before the request retry count finishes. assertTrue(ioException instanceof RaftRetryFailureException || ioException instanceof GroupMismatchException || ioException instanceof ContainerNotOpenException); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // now close the stream, It will update ack length after watchForCommit key.close(); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(0, keyOutputStream.getLocationInfoList().size()); validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET); } } private void testWriteMoreThanMaxFlushSize(OzoneClient client) throws Exception { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, 
keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); assertThat(blockOutputStream.getBufferPool().getSize()) .isLessThanOrEqualTo(4); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(400, blockOutputStream.getTotalDataFlushedLength()); // This will flush the data and update the flush length and the map. key.flush(); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(3, raftClient.getCommitInfoMap().size()); // Close the containers on the Datanode and write more data TestHelper.waitForContainerClose(key, cluster); key.write(data1); // As a part of handling the exception, 2 failed writeChunks will be // rewritten plus 1 putBlocks for flush // and one flush for partial chunk key.flush(); assertInstanceOf(ContainerNotOpenException.class, checkForException(blockOutputStream.getIoException())); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // commitInfoMap will remain intact as there is no server failure assertEquals(3, raftClient.getCommitInfoMap().size()); // now close the stream, It will update ack length after watchForCommit key.close(); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getLocationInfoList().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } private void testExceptionDuringClose(OzoneClient client) throws Exception { String keyName = 
getKeyName(); OzoneOutputStream key = createKey(client, keyName); int dataLength = 167; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); assertThat(blockOutputStream.getBufferPool().getSize()) .isLessThanOrEqualTo(2); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(0, blockOutputStream.getTotalDataFlushedLength()); assertEquals(0, blockOutputStream.getTotalAckDataLength()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); // This will flush the data and update the flush length and the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertThat(blockOutputStream.getBufferPool().getSize()) .isLessThanOrEqualTo(2); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(3, raftClient.getCommitInfoMap().size()); // Close the containers on the Datanode and write more data TestHelper.waitForContainerClose(key, cluster); key.write(data1); // commitInfoMap will remain intact as there is no server failure assertEquals(3, raftClient.getCommitInfoMap().size()); // now close the stream, It will hit exception key.close(); assertInstanceOf(ContainerNotOpenException.class, checkForException(blockOutputStream.getIoException())); // Make sure the retryCount is reset after the 
exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getStreamEntries().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } private void testWatchForCommitWithSingleNodeRatis(OzoneClient client) throws Exception { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName, 0, ReplicationFactor.ONE); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have up to 4 buffers allocated worth of chunk size assertThat(blockOutputStream.getBufferPool().getSize()) .isLessThanOrEqualTo(4); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // ack'd by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast one entry from the map where each // entry corresponds to flushSize worth of 
data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(1); // This will flush the data and update the flush length and the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertThat(blockOutputStream.getBufferPool().getSize()) .isLessThanOrEqualTo(4); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(1, raftClient.getCommitInfoMap().size()); // Close the containers on the Datanode and write more data TestHelper.waitForContainerClose(key, cluster); // 4 writeChunks = maxFlushSize + 2 putBlocks will be discarded here // once exception is hit key.write(data1); // As a part of handling the exception, 4 failed writeChunks will be // rewritten plus one partial chunk plus two putBlocks for flushSize // and one flush for partial chunk key.flush(); assertInstanceOf(ContainerNotOpenException.class, checkForException(blockOutputStream.getIoException())); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); // commitInfoMap will remain intact as there is no server failure assertEquals(1, raftClient.getCommitInfoMap().size()); assertEquals(2, keyOutputStream.getStreamEntries().size()); // now close the stream, It will update ack length after watchForCommit key.close(); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, 
keyOutputStream.getLocationInfoList().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } @ParameterizedTest @MethodSource("clientParameters") void testDatanodeFailureWithSingleNode(boolean flushDelay, boolean enablePiggybacking) throws Exception { OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking); try (OzoneClient client = newClient(cluster.getConf(), config)) { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName, 0, ReplicationFactor.ONE); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); // since its hitting the full bufferCondition, it will call watchForCommit // and completes at least putBlock for first flushSize worth of data KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(1, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have 3 buffers allocated worth of chunk size assertEquals(4, blockOutputStream.getBufferPool().getSize()); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // ack'd by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast flushSize worth of data buffer // where each entry corresponds to 
flushSize worth of data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); // This will flush the data and update the flush length and the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertEquals(4, blockOutputStream.getBufferPool().getSize()); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(1, raftClient.getCommitInfoMap().size()); Pipeline pipeline = raftClient.getPipeline(); cluster.shutdownHddsDatanode(pipeline.getNodes().get(0)); // again write data with more than max buffer limit. This will call // watchForCommit again. No write will happen in the current block and // data will be rewritten to the next block. 
key.write(data1); key.flush(); assertInstanceOf(RaftRetryFailureException.class, checkForException(blockOutputStream.getIoException())); assertEquals(1, raftClient.getCommitInfoMap().size()); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); assertEquals(2, keyOutputStream.getStreamEntries().size()); // now close the stream, It will update ack length after watchForCommit key.close(); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getStreamEntries().size()); assertEquals(0, keyOutputStream.getLocationInfoList().size()); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); cluster.restartHddsDatanode(pipeline.getNodes().get(0), true); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } } @ParameterizedTest @MethodSource("clientParameters") void testDatanodeFailureWithPreAllocation(boolean flushDelay, boolean enablePiggybacking) throws Exception { OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking); try (OzoneClient client = newClient(cluster.getConf(), config)) { String keyName = getKeyName(); OzoneOutputStream key = createKey(client, keyName, 3 * BLOCK_SIZE, ReplicationFactor.ONE); int dataLength = MAX_FLUSH_SIZE + CHUNK_SIZE; byte[] data1 = RandomUtils.secure().randomBytes(dataLength); key.write(data1); // since its hitting the full bufferCondition, it will call watchForCommit // and completes at least putBlock for first flushSize worth of data KeyOutputStream keyOutputStream = assertInstanceOf(KeyOutputStream.class, key.getOutputStream()); assertEquals(3, keyOutputStream.getStreamEntries().size()); RatisBlockOutputStream blockOutputStream = 
assertInstanceOf(RatisBlockOutputStream.class, keyOutputStream.getStreamEntries().get(0).getOutputStream()); // we have just written data more than flush Size(2 chunks), at this time // buffer pool will have 3 buffers allocated worth of chunk size assertEquals(4, blockOutputStream.getBufferPool().getSize()); // writtenDataLength as well flushedDataLength will be updated here assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(MAX_FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength()); // since data equals to maxBufferSize is written, this will be a blocking // call and hence will wait for atleast flushSize worth of data to get // ack'd by all servers right here assertThat(blockOutputStream.getTotalAckDataLength()) .isGreaterThanOrEqualTo(FLUSH_SIZE); // watchForCommit will clean up atleast flushSize worth of data buffer // where each entry corresponds to flushSize worth of data assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size()) .isLessThanOrEqualTo(2); // This will flush the data and update the flush length and // the map. key.flush(); // Since the data in the buffer is already flushed, flush here will have // no impact on the counters and data structures assertEquals(4, blockOutputStream.getBufferPool().getSize()); assertEquals(dataLength, blockOutputStream.getWrittenDataLength()); assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength()); // flush will make sure one more entry gets updated in the map assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); XceiverClientRatis raftClient = (XceiverClientRatis) blockOutputStream.getXceiverClient(); assertEquals(1, raftClient.getCommitInfoMap().size()); Pipeline pipeline = raftClient.getPipeline(); cluster.shutdownHddsDatanode(pipeline.getNodes().get(0)); // again write data with more than max buffer limit. This will call // watchForCommit again. 
No write will happen and key.write(data1); key.flush(); assertInstanceOf(RaftRetryFailureException.class, checkForException(blockOutputStream.getIoException())); // Make sure the retryCount is reset after the exception is handled assertEquals(0, keyOutputStream.getRetryCount()); assertEquals(1, raftClient.getCommitInfoMap().size()); // now close the stream, It will update ack length after watchForCommit key.close(); assertEquals(dataLength, blockOutputStream.getTotalAckDataLength()); // make sure the bufferPool is empty assertEquals(0, blockOutputStream.getBufferPool().computeBufferData()); assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size()); assertEquals(0, keyOutputStream.getLocationInfoList().size()); cluster.restartHddsDatanode(pipeline.getNodes().get(0), true); // Written the same data twice byte[] bytes = ArrayUtils.addAll(data1, data1); validateData(keyName, bytes, client.getObjectStore(), VOLUME, BUCKET); } } private void stopAndRemove(DatanodeDetails dn) throws IOException { HddsDatanodeService datanode = cluster.getHddsDatanodes().remove(cluster.getHddsDatanodeIndex(dn)); datanode.stop(); datanode.join(); } }
googleapis/google-cloud-java
35,473
java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/GetServerConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1/cluster_service.proto // Protobuf Java Version: 3.25.8 package com.google.container.v1; /** * * * <pre> * Gets the current Kubernetes Engine service configuration. * </pre> * * Protobuf type {@code google.container.v1.GetServerConfigRequest} */ public final class GetServerConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1.GetServerConfigRequest) GetServerConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetServerConfigRequest.newBuilder() to construct. 
private GetServerConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetServerConfigRequest() { projectId_ = ""; zone_ = ""; name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetServerConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_GetServerConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_GetServerConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.GetServerConfigRequest.class, com.google.container.v1.GetServerConfigRequest.Builder.class); } public static final int PROJECT_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object projectId_ = ""; /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @return The projectId. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Deprecated. 
The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @return The bytes for projectId. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ZONE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object zone_ = ""; /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @return The zone. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. 
* </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @return The bytes for zone. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAME_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, name_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, name_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1.GetServerConfigRequest)) 
{ return super.equals(obj); } com.google.container.v1.GetServerConfigRequest other = (com.google.container.v1.GetServerConfigRequest) obj; if (!getProjectId().equals(other.getProjectId())) return false; if (!getZone().equals(other.getZone())) return false; if (!getName().equals(other.getName())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); hash = (37 * hash) + ZONE_FIELD_NUMBER; hash = (53 * hash) + getZone().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1.GetServerConfigRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.GetServerConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.GetServerConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.GetServerConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.GetServerConfigRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.GetServerConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.GetServerConfigRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.GetServerConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.GetServerConfigRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1.GetServerConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.GetServerConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.GetServerConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1.GetServerConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Gets the current Kubernetes Engine service configuration. * </pre> * * Protobuf type {@code google.container.v1.GetServerConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1.GetServerConfigRequest) com.google.container.v1.GetServerConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_GetServerConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_GetServerConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.GetServerConfigRequest.class, com.google.container.v1.GetServerConfigRequest.Builder.class); } // Construct using com.google.container.v1.GetServerConfigRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; projectId_ = ""; zone_ = ""; name_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() 
{ return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_GetServerConfigRequest_descriptor; } @java.lang.Override public com.google.container.v1.GetServerConfigRequest getDefaultInstanceForType() { return com.google.container.v1.GetServerConfigRequest.getDefaultInstance(); } @java.lang.Override public com.google.container.v1.GetServerConfigRequest build() { com.google.container.v1.GetServerConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1.GetServerConfigRequest buildPartial() { com.google.container.v1.GetServerConfigRequest result = new com.google.container.v1.GetServerConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.container.v1.GetServerConfigRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.projectId_ = projectId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.zone_ = zone_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.name_ = name_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1.GetServerConfigRequest) { return mergeFrom((com.google.container.v1.GetServerConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1.GetServerConfigRequest other) { if (other == com.google.container.v1.GetServerConfigRequest.getDefaultInstance()) return this; if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getZone().isEmpty()) { zone_ = other.zone_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { projectId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { zone_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object projectId_ = ""; /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @return The projectId. */ @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @return The bytes for projectId. */ @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. 
* </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @param value The projectId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4202 * @param value The bytes for projectId to set. * @return This builder for chaining. 
*/ @java.lang.Deprecated public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object zone_ = ""; /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @return The zone. */ @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @return The bytes for zone. */ @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. 
This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @param value The zone to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setZone(java.lang.String value) { if (value == null) { throw new NullPointerException(); } zone_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearZone() { zone_ = getDefaultInstance().getZone(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * to return operations for. This field has been deprecated and replaced by * the name field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.GetServerConfigRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4208 * @param value The bytes for zone to set. * @return This builder for chaining. 
*/ @java.lang.Deprecated public Builder setZoneBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); zone_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object name_ = ""; /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The name (project and location) of the server config to get, * specified in the format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code>string name = 4;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1.GetServerConfigRequest) } // @@protoc_insertion_point(class_scope:google.container.v1.GetServerConfigRequest) private static final com.google.container.v1.GetServerConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1.GetServerConfigRequest(); } public static com.google.container.v1.GetServerConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetServerConfigRequest> PARSER = new com.google.protobuf.AbstractParser<GetServerConfigRequest>() { @java.lang.Override public GetServerConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GetServerConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetServerConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1.GetServerConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/uima-uimaj
35,676
uimaj-core/src/main/java/org/apache/uima/util/impl/XMLParser_impl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.util.impl; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.uima.UIMAFramework; import org.apache.uima.UIMA_IllegalArgumentException; import org.apache.uima.UIMA_IllegalStateException; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.analysis_engine.ResultSpecification; import org.apache.uima.analysis_engine.TaeDescription; import org.apache.uima.collection.CasConsumerDescription; import org.apache.uima.collection.CasInitializerDescription; import org.apache.uima.collection.CollectionReaderDescription; import org.apache.uima.collection.metadata.CpeDescription; import org.apache.uima.flow.FlowControllerDescription; import org.apache.uima.internal.util.XMLUtils; import org.apache.uima.resource.CustomResourceSpecifier; import org.apache.uima.resource.PearSpecifier; import org.apache.uima.resource.ResourceSpecifier; import org.apache.uima.resource.URISpecifier; import 
org.apache.uima.resource.metadata.FsIndexCollection; import org.apache.uima.resource.metadata.ResourceManagerConfiguration; import org.apache.uima.resource.metadata.ResourceMetaData; import org.apache.uima.resource.metadata.TypePriorities; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.apache.uima.resource.metadata.impl.MetaDataObject_impl; import org.apache.uima.search.IndexBuildSpecification; import org.apache.uima.util.InvalidXMLException; import org.apache.uima.util.Level; import org.apache.uima.util.SaxDeserializer; import org.apache.uima.util.XMLInputSource; import org.apache.uima.util.XMLParser; import org.apache.uima.util.XMLizable; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; /** * Reference implementation of {@link XMLParser}. * * */ public class XMLParser_impl implements XMLParser { /** * resource bundle for log messages */ private static final String LOG_RESOURCE_BUNDLE = "org.apache.uima.impl.log_messages"; /** * current class */ private static final Class<XMLParser_impl> CLASS_NAME = XMLParser_impl.class; /** * The URL to the Resource Specifier XML Schema file */ private static final URL SCHEMA_URL; static { URL schemaURL = XMLParser_impl.class.getResource(RESOURCE_SPECIFIER_SCHEMA_NAME); if (schemaURL == null) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, CLASS_NAME.getName(), "getSchemaURL", LOG_RESOURCE_BUNDLE, "UIMA_resource_specifier_schema_not_found__WARNING"); } else { String urlString = schemaURL.toString().replaceAll(" ", "%20"); try { schemaURL = new URL(urlString); } catch (MalformedURLException e) { } } SCHEMA_URL = schemaURL; } /** * Map from XML element names to Class objects. */ protected Map<String, Class<? 
extends XMLizable>> mElementToClassMap = Collections .synchronizedMap(new HashMap<>()); /** * Whether schema validation is enabled. */ protected boolean mSchemaValidationEnabled = false; protected static final ParsingOptions DEFAULT_PARSING_OPTIONS = new ParsingOptions(true); /** * Creates a new XMLParser_impl. * * @throws ParserConfigurationException * if the underlying XML parser could not be constructed */ public XMLParser_impl() throws ParserConfigurationException { } /** * @see org.apache.uima.util.XMLParser#enableSchemaValidation(boolean) */ @Override public void enableSchemaValidation(boolean aEnable) { mSchemaValidationEnabled = aEnable; } /** * Parses an XML input stream and produces an object. * * @param aInput * the input source from which to read the XML document * @param aNamespaceForSchema * XML namespace for elements to be validated against XML schema. If null, no schema will * be used. * @param aSchemaUrl * URL to XML schema that will be used to validate the XML document. If null, no schema * will be used. * * @return an <code>XMLizable</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid object */ @Override public XMLizable parse(XMLInputSource aInput, String aNamespaceForSchema, URL aSchemaUrl, XMLParser.ParsingOptions aOptions) throws InvalidXMLException { URL urlToParse = aInput.getURL(); try { SAXParserFactory factory = XMLUtils.createSAXParserFactory(); // Turn on namespace support factory.setNamespaceAware(true); SAXParser parser = factory.newSAXParser(); // unless multi-threaded, in the future, if // performance issue, can save this , and reuse // with reset() XMLReader reader = parser.getXMLReader(); reader.setFeature("http://xml.org/sax/features/namespace-prefixes", true); // reader.setFeature("http://xml.org/sax/features/namespaces", true); // Is this needed? 
// enable validation if requested if (mSchemaValidationEnabled && aNamespaceForSchema != null && aSchemaUrl != null) { try { reader.setFeature("http://apache.org/xml/features/validation/schema", true); reader.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", aNamespaceForSchema + " " + aSchemaUrl); reader.setFeature("http://xml.org/sax/features/validation", true); } catch (SAXNotRecognizedException e) { UIMAFramework.getLogger().log(Level.INFO, "The installed XML Parser does not support schema validation. No validation will occur."); } } // set up InputSource InputSource input = new InputSource(); input.setByteStream(aInput.getInputStream()); String systemId; if (urlToParse != null) { systemId = urlToParse.toString(); } else { systemId = new File(System.getProperty("user.dir")).toURL().toString(); } input.setSystemId(systemId); // set up error handler to catch validation errors\ ParseErrorHandler errorHandler = new ParseErrorHandler(); reader.setErrorHandler(errorHandler); // Parse with SaxDeserializer SaxDeserializer deser = new SaxDeserializer_impl(this, aOptions); reader.setContentHandler(deser); if (aOptions.preserveComments) { reader.setProperty("http://xml.org/sax/properties/lexical-handler", deser); } reader.parse(input); // if there was an exception, throw it if (errorHandler.getException() != null) { throw errorHandler.getException(); } // otherwise build the UIMA XMLizable object and return it XMLizable result = deser.getObject(); if (result instanceof MetaDataObject_impl) { // set Source URL (needed to later resolve descriptor-relative paths) ((MetaDataObject_impl) result).setSourceUrl(urlToParse); } return result; } catch (Exception e) { String sourceFile = urlToParse != null ? urlToParse.toString() : "<unknown source>"; throw new InvalidXMLException(InvalidXMLException.INVALID_DESCRIPTOR_FILE, new Object[] { sourceFile }, e); } } /** * Parses an XML input stream and produces an object. 
* * @param aInput * the input source from which to read the XML document * @param aNamespaceForSchema * XML namespace for elements to be validated against XML schema. If null, no schema will * be used. * @param aSchemaUrl * URL to XML schema that will be used to validate the XML document. If null, no schema * will be used. * * @return an <code>XMLizable</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid object */ @Override public XMLizable parse(XMLInputSource aInput, String aNamespaceForSchema, URL aSchemaUrl) throws InvalidXMLException { return parse(aInput, aNamespaceForSchema, aSchemaUrl, DEFAULT_PARSING_OPTIONS); } /** * Parses an XML input stream and produces an object. No schema validation will be done. * * @param aInput * the input source from which to read the XML document * * @return an <code>XMLizable</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid object */ @Override public XMLizable parse(XMLInputSource aInput) throws InvalidXMLException { return parse(aInput, null, null, DEFAULT_PARSING_OPTIONS); } /* * (non-Javadoc) * * @see org.apache.uima.util.XMLParser#parse(org.apache.uima.util.XMLInputSource, * org.apache.uima.util.XMLParser.ParsingOptions) */ @Override public XMLizable parse(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { return parse(aInput, null, null, aOptions); } /** * Builds an object from its XML DOM representation. * * @param aElement * a DOM Element * * @return an <code>XMLizable</code> object constructed from the DOM element * * @throws InvalidXMLException * if the XML element does not specify a valid object */ @Override public XMLizable buildObject(Element aElement) throws InvalidXMLException { return buildObject(aElement, new ParsingOptions(true)); } /** * Builds an object from its XML DOM representation. 
* * @param aElement * a DOM Element * * @return an <code>XMLizable</code> object constructed from the DOM element * * @throws InvalidXMLException * if the XML element does not specify a valid object */ @Override public XMLizable buildObject(Element aElement, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate a Class that can be built from the element Class<? extends XMLizable> cls = mElementToClassMap.get(aElement.getTagName()); if (cls == null) { throw new InvalidXMLException(InvalidXMLException.UNKNOWN_ELEMENT, new Object[] { aElement.getTagName() }); } // resolve the class name and instantiate the class XMLizable object; try { object = cls.newInstance(); } catch (Exception e) { throw new UIMA_IllegalStateException( UIMA_IllegalStateException.COULD_NOT_INSTANTIATE_XMLIZABLE, new Object[] { cls.getName() }, e); } callBuildFromXMLElement(aElement, object, aOptions); return object; } private void callBuildFromXMLElement(Element aElement, XMLizable object, ParsingOptions aOptions) throws InvalidXMLException { if (aOptions.preserveComments && (object instanceof MetaDataObject_impl)) { ((MetaDataObject_impl) object).setInfoset(aElement); } object.buildFromXMLElement(aElement, this, aOptions); } /* * (non-Javadoc) * * @see org.apache.uima.util.XMLParser#buildObjectOrPrimitive(Element, ParsingOptions) */ @Override public Object buildObjectOrPrimitive(Element aElement, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate a Class that can be built from the element Class<? 
extends XMLizable> cls = mElementToClassMap.get(aElement.getTagName()); if (cls == null) { // attempt to parse as primitive Object primObj = XMLUtils.readPrimitiveValue(aElement); if (primObj != null) { return primObj; } // unknown element - throw exception throw new InvalidXMLException(InvalidXMLException.UNKNOWN_ELEMENT, new Object[] { aElement.getTagName() }); } // resolve the class name and instantiate the class XMLizable object; try { object = cls.newInstance(); } catch (Exception e) { throw new UIMA_IllegalStateException( UIMA_IllegalStateException.COULD_NOT_INSTANTIATE_XMLIZABLE, new Object[] { cls.getName() }, e); } // construct the XMLizable object from the XML element callBuildFromXMLElement(aElement, object, aOptions); return object; } /** * Parses a ResourceSpecifier from an XML input stream. XML schema validation will be done against * the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. * * @param aInput * the input source from which to read the XML document * * @return a <code>ResourceSpecifier</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid ResourceSpecifier */ @Override public ResourceSpecifier parseResourceSpecifier(XMLInputSource aInput) throws InvalidXMLException { return parseResourceSpecifier(aInput, DEFAULT_PARSING_OPTIONS); } /** * Parses a ResourceSpecifier from an XML input stream. XML schema validation will be done against * the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. 
* * @param aInput * the input source from which to read the XML document * * @return a <code>ResourceSpecifier</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid ResourceSpecifier */ @Override public ResourceSpecifier parseResourceSpecifier(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof ResourceSpecifier) { return (ResourceSpecifier) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { ResourceSpecifier.class.getName(), object.getClass().getName() }); } } /** * Parses a ResourceMetaData object from an XML input stream. XML schema validation will be done * against the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. * * @param aInput * the input source from which to read the XML document * * @return a <code>ResourceMetaData</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid ResourceSpecifier */ @Override public ResourceMetaData parseResourceMetaData(XMLInputSource aInput) throws InvalidXMLException { return parseResourceMetaData(aInput, DEFAULT_PARSING_OPTIONS); } /** * Parses a ResourceMetaData object from an XML input stream. XML schema validation will be done * against the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. 
* * @param aInput * the input source from which to read the XML document * * @return a <code>ResourceMetaData</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid ResourceSpecifier */ @Override public ResourceMetaData parseResourceMetaData(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof ResourceMetaData) { return (ResourceMetaData) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { ResourceMetaData.class.getName(), object.getClass().getName() }); } } /** * Parses a URISpecifier from an XML input stream. XML schema validation will be done against the * {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. * * @param aInput * the input source from which to read the XML document * * @return a <code>URISpecifier</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid URISpecifier */ @Override public URISpecifier parseURISpecifier(XMLInputSource aInput) throws InvalidXMLException { return parseURISpecifier(aInput, DEFAULT_PARSING_OPTIONS); } /** * Parses a URISpecifier from an XML input stream. XML schema validation will be done against the * {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. 
* * @param aInput * the input source from which to read the XML document * * @return a <code>URISpecifier</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid URISpecifier */ @Override public URISpecifier parseURISpecifier(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof URISpecifier) { return (URISpecifier) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { URISpecifier.class.getName(), object.getClass().getName() }); } } /** * Parses a AnalysisEngineDescription from an XML input stream. XML schema validation will be done * against the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. * * @param aInput * the input source from which to read the XML document * * @return a <code>AnalysisEngineDescription</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid AnalysisEngineDescription */ @Override public AnalysisEngineDescription parseAnalysisEngineDescription(XMLInputSource aInput) throws InvalidXMLException { return parseAnalysisEngineDescription(aInput, DEFAULT_PARSING_OPTIONS); } /** * Parses a AnalysisEngineDescription from an XML input stream. XML schema validation will be done * against the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. 
* * @param aInput * the input source from which to read the XML document * * @return a <code>AnalysisEngineDescription</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid AnalysisEngineDescription */ @Override public AnalysisEngineDescription parseAnalysisEngineDescription(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof AnalysisEngineDescription) { return (AnalysisEngineDescription) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { AnalysisEngineDescription.class.getName(), object.getClass().getName() }); } } /** * Parses a TaeDescription from an XML input stream. XML schema validation will be done against * the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. * * @param aInput * the input source from which to read the XML document * * @return a <code>TaeDescription</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid TaeDescription * * @deprecated since v2.0 * @forRemoval 4.0.0 */ @Override @Deprecated(since = "2.0.0") public TaeDescription parseTaeDescription(XMLInputSource aInput) throws InvalidXMLException { return parseTaeDescription(aInput, DEFAULT_PARSING_OPTIONS); } /** * Parses a TaeDescription from an XML input stream. XML schema validation will be done against * the {@link #RESOURCE_SPECIFIER_SCHEMA_NAME} if it can be found in the classpath. 
* * @param aInput * the input source from which to read the XML document * * @return a <code>TaeDescription</code> object constructed from the XML document * * @throws InvalidXMLException * if the input XML is not valid or does not specify a valid TaeDescription * * @deprecated since v2.0 * @forRemoval 4.0.0 */ @Override @Deprecated(since = "2.0.0") public TaeDescription parseTaeDescription(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof TaeDescription) { return (TaeDescription) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { TaeDescription.class.getName(), object.getClass().getName() }); } } /** * @see org.apache.uima.util.XMLParser#parseResultSpecification(org.apache.uima.util.XMLInputSource) */ @Override public ResultSpecification parseResultSpecification(XMLInputSource aInput) throws InvalidXMLException { return parseResultSpecification(aInput, DEFAULT_PARSING_OPTIONS); } /** * @see org.apache.uima.util.XMLParser#parseResultSpecification(org.apache.uima.util.XMLInputSource) */ @Override public ResultSpecification parseResultSpecification(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, null, aOptions); if (object instanceof ResultSpecification) { return (ResultSpecification) object; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { ResultSpecification.class.getName(), object.getClass().getName() }); } } /** * @see org.apache.uima.util.XMLParser#parseCasConsumerDescription(org.apache.uima.util.XMLInputSource) */ @Override public CasConsumerDescription parseCasConsumerDescription(XMLInputSource aInput) throws InvalidXMLException { return parseCasConsumerDescription(aInput, DEFAULT_PARSING_OPTIONS); } /** * 
@see org.apache.uima.util.XMLParser#parseCasConsumerDescription(org.apache.uima.util.XMLInputSource)
   */
  @Override
  public CasConsumerDescription parseCasConsumerDescription(XMLInputSource aInput,
          ParsingOptions aOptions) throws InvalidXMLException {
    XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);

    // Pattern-matching instanceof, consistent with the other parseXyz methods in this class.
    if (object instanceof CasConsumerDescription casConsumerDescription) {
      return casConsumerDescription;
    } else {
      throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS,
              new Object[] { CasConsumerDescription.class.getName(), object.getClass().getName() });
    }
  }

  @Override
  @Deprecated
  public CasInitializerDescription parseCasInitializerDescription(XMLInputSource aInput)
          throws InvalidXMLException {
    return parseCasInitializerDescription(aInput, DEFAULT_PARSING_OPTIONS);
  }

  @Override
  @Deprecated(since = "3.6.0")
  public CasInitializerDescription parseCasInitializerDescription(XMLInputSource aInput,
          ParsingOptions aOptions) throws InvalidXMLException {
    var object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);
    if (object instanceof CasInitializerDescription casInitializerDescription) {
      return casInitializerDescription;
    } else {
      throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] {
          CasInitializerDescription.class.getName(), object.getClass().getName() });
    }
  }

  @Override
  public CollectionReaderDescription parseCollectionReaderDescription(XMLInputSource aInput)
          throws InvalidXMLException {
    return parseCollectionReaderDescription(aInput, DEFAULT_PARSING_OPTIONS);
  }

  @Override
  public CollectionReaderDescription parseCollectionReaderDescription(XMLInputSource aInput,
          ParsingOptions aOptions) throws InvalidXMLException {
    XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);
    if (object instanceof CollectionReaderDescription
collectionReaderDescription) { return collectionReaderDescription; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { CollectionReaderDescription.class.getName(), object.getClass().getName() }); } } @Override public CpeDescription parseCpeDescription(XMLInputSource aInput) throws InvalidXMLException { XMLizable object = parse(aInput); if (object instanceof CpeDescription cpeDescription) { return cpeDescription; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { CpeDescription.class.getName(), object.getClass().getName() }); } } @Override public TypePriorities parseTypePriorities(XMLInputSource aInput) throws InvalidXMLException { return parseTypePriorities(aInput, DEFAULT_PARSING_OPTIONS); } @Override public TypePriorities parseTypePriorities(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof TypePriorities typePriorities) { return typePriorities; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { TypePriorities.class.getName(), object.getClass().getName() }); } } @Override public TypeSystemDescription parseTypeSystemDescription(XMLInputSource aInput) throws InvalidXMLException { return parseTypeSystemDescription(aInput, DEFAULT_PARSING_OPTIONS); } @Override public TypeSystemDescription parseTypeSystemDescription(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof TypeSystemDescription typeSystemDescription) { return typeSystemDescription; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { TypeSystemDescription.class.getName(), object.getClass().getName() }); } 
}

  /** Convenience overload using the shared default parsing options. */
  @Override
  public FsIndexCollection parseFsIndexCollection(XMLInputSource aInput)
          throws InvalidXMLException {
    return parseFsIndexCollection(aInput, DEFAULT_PARSING_OPTIONS);
  }

  /** Parses and type-checks an FsIndexCollection descriptor. */
  @Override
  public FsIndexCollection parseFsIndexCollection(XMLInputSource aInput, ParsingOptions aOptions)
          throws InvalidXMLException {
    XMLizable parsed = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);
    if (!(parsed instanceof FsIndexCollection fsIndexCollection)) {
      throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS,
              new Object[] { FsIndexCollection.class.getName(), parsed.getClass().getName() });
    }
    return fsIndexCollection;
  }

  /** Convenience overload using the shared default parsing options. */
  @Override
  public ResourceManagerConfiguration parseResourceManagerConfiguration(XMLInputSource aInput)
          throws InvalidXMLException {
    return parseResourceManagerConfiguration(aInput, DEFAULT_PARSING_OPTIONS);
  }

  /** Parses and type-checks a ResourceManagerConfiguration descriptor. */
  @Override
  public ResourceManagerConfiguration parseResourceManagerConfiguration(XMLInputSource aInput,
          ParsingOptions aOptions) throws InvalidXMLException {
    XMLizable parsed = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);
    if (!(parsed instanceof ResourceManagerConfiguration resourceManagerConfiguration)) {
      throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] {
          ResourceManagerConfiguration.class.getName(), parsed.getClass().getName() });
    }
    return resourceManagerConfiguration;
  }

  /** Convenience overload using the shared default parsing options. */
  @Override
  public FlowControllerDescription parseFlowControllerDescription(XMLInputSource aInput)
          throws InvalidXMLException {
    return parseFlowControllerDescription(aInput, DEFAULT_PARSING_OPTIONS);
  }

  /** Parses and type-checks a FlowControllerDescription descriptor. */
  @Override
  public FlowControllerDescription parseFlowControllerDescription(XMLInputSource aInput,
          ParsingOptions aOptions) throws InvalidXMLException {
    XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions);
    if (object instanceof
FlowControllerDescription flowControllerDescription) { return flowControllerDescription; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { FlowControllerDescription.class.getName(), object.getClass().getName() }); } } @Override public CustomResourceSpecifier parseCustomResourceSpecifier(XMLInputSource aInput) throws InvalidXMLException { return parseCustomResourceSpecifier(aInput, DEFAULT_PARSING_OPTIONS); } @Override public CustomResourceSpecifier parseCustomResourceSpecifier(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof CustomResourceSpecifier customResourceSpecifier) { return customResourceSpecifier; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { CustomResourceSpecifier.class.getName(), object.getClass().getName() }); } } @Override public PearSpecifier parsePearSpecifier(XMLInputSource aInput) throws InvalidXMLException { return parsePearSpecifier(aInput, DEFAULT_PARSING_OPTIONS); } @Override public PearSpecifier parsePearSpecifier(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { // attempt to locate resource specifier schema XMLizable object = parse(aInput, RESOURCE_SPECIFIER_NAMESPACE, SCHEMA_URL, aOptions); if (object instanceof PearSpecifier pearSpecifier) { return pearSpecifier; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { PearSpecifier.class.getName(), object.getClass().getName() }); } } @Override public IndexBuildSpecification parseIndexBuildSpecification(XMLInputSource aInput) throws InvalidXMLException { return parseIndexBuildSpecification(aInput, DEFAULT_PARSING_OPTIONS); } @Override public IndexBuildSpecification parseIndexBuildSpecification(XMLInputSource aInput, ParsingOptions aOptions) throws InvalidXMLException { 
XMLizable object = parse(aInput, aOptions); if (object instanceof IndexBuildSpecification indexBuildSpecification) { return indexBuildSpecification; } else { throw new InvalidXMLException(InvalidXMLException.INVALID_CLASS, new Object[] { IndexBuildSpecification.class.getName(), object.getClass().getName() }); } } /** * Configures this XMLParser by registering a mapping between the name of an XML element and the * Class of object to be built from elements with that name. Ignores entries with no name, i.e. * are not configured via XML * * @param aElementName * the name of an XML element * @param aClassName * the name of a Class of object to be built. This class must implement {@link XMLizable} * and have a zero-argument constructor. * * @throws ClassNotFoundException * if the class named by <code>aClassName</code> could not be found * @throws UIMA_IllegalArgumentException * if the class named by <code>aClassName</code> does not implement * <code>XMLIzable</code>. @ */ @Override @SuppressWarnings("unchecked") public void addMapping(String aElementName, String aClassName) throws ClassNotFoundException { if (aElementName == null) { return; } // resolve the class name and ensure that it implements XMLizable Class<? extends XMLizable> cls = (Class<? extends XMLizable>) Class.forName(aClassName); if (XMLizable.class.isAssignableFrom(cls)) { // add to the map mElementToClassMap.put(aElementName, cls); } else { throw new UIMA_IllegalArgumentException( UIMA_IllegalArgumentException.MUST_IMPLEMENT_XMLIZABLE, new Object[] { aClassName }); } } @Override public SaxDeserializer newSaxDeserializer() { return new SaxDeserializer_impl(this, new XMLParser.ParsingOptions(true)); } @Override public SaxDeserializer newSaxDeserializer(XMLParser.ParsingOptions aOptions) { return new SaxDeserializer_impl(this, aOptions); } /** * Error handler for XML parsing. Stores first error in <code>exception</code> field for later * retrieval. 
*/ static class ParseErrorHandler extends DefaultHandler { private SAXParseException mException = null; @Override public void error(SAXParseException aError) { if (mException == null) mException = aError; } @Override public void fatalError(SAXParseException aError) { if (mException == null) mException = aError; } @Override public void warning(SAXParseException aWarning) { System.err.println("XML Warning: " + aWarning.getMessage()); } public SAXParseException getException() { return mException; } public void clear() { mException = null; } } }
googleapis/google-cloud-java
35,425
java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ImportAptArtifactsErrorInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1/apt_artifact.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1; /** * * * <pre> * Error information explaining why a package was not imported. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo} */ public final class ImportAptArtifactsErrorInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) ImportAptArtifactsErrorInfoOrBuilder { private static final long serialVersionUID = 0L; // Use ImportAptArtifactsErrorInfo.newBuilder() to construct. 
private ImportAptArtifactsErrorInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportAptArtifactsErrorInfo() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportAptArtifactsErrorInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportAptArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo.Builder.class); } private int bitField0_; private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_SOURCE(1), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_SOURCE; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_SOURCE_FIELD_NUMBER = 1; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource getGcsSource() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.getDefaultInstance(); } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.getDefaultInstance(); } public static final int ERROR_FIELD_NUMBER = 2; private com.google.rpc.Status error_; /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. 
*/ @java.lang.Override public boolean hasError() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ @java.lang.Override public com.google.rpc.Status getError() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage( 1, (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getError()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getError()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo)) { return super.equals(obj); } 
com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo other = (com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) obj; if (hasError() != other.hasError()) return false; if (hasError()) { if (!getError().equals(other.getError())) return false; } if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsSource().equals(other.getGcsSource())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasError()) { hash = (37 * hash) + ERROR_FIELD_NUMBER; hash = (53 * hash) + getError().hashCode(); } switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getGcsSource().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Error information explaining why a package was not imported. 
* </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportAptArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo.Builder.class); } // Construct using // com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getErrorFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsSourceBuilder_ != null) { gcsSourceBuilder_.clear(); } error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.artifactregistry.v1.AptArtifactProto 
.internal_static_google_devtools_artifactregistry_v1_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo .getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo build() { com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo buildPartial() { com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo result = new com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.error_ = errorBuilder_ == null ? 
error_ : errorBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsSourceBuilder_ != null) { result.source_ = gcsSourceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) { return mergeFrom( (com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo other) { if (other == com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo .getDefaultInstance()) return this; if (other.hasError()) { mergeError(other.getError()); } switch (other.getSourceCase()) { case GCS_SOURCE: { mergeGcsSource(other.getGcsSource()); break; } case SOURCE_NOT_SET: { break; } } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSourceOrBuilder> gcsSourceBuilder_; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. 
*/ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource getGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource .getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsSourceBuilder_.getMessage(); } return com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsSourceBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.Builder builderForValue) { if (gcsSourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsSourceBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ public Builder mergeGcsSource( com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource .getDefaultInstance()) { source_ = com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.newBuilder( (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsSourceBuilder_.mergeFrom(value); } else { gcsSourceBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ public Builder clearGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsSourceBuilder_.clear(); } return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ public com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.Builder getGcsSourceBuilder() { return getGcsSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) { return gcsSourceBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource gcs_source = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSourceOrBuilder> getGcsSourceFieldBuilder() { if (gcsSourceBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource .getDefaultInstance(); } gcsSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSourceOrBuilder>( (com.google.devtools.artifactregistry.v1.ImportAptArtifactsGcsSource) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsSourceBuilder_; } private com.google.rpc.Status error_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorBuilder_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ public boolean hasError() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ public com.google.rpc.Status getError() { if (errorBuilder_ == null) { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } else { return errorBuilder_.getMessage(); } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } error_ = value; } else { errorBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status.Builder builderForValue) { if (errorBuilder_ == null) { error_ = builderForValue.build(); } else { errorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder mergeError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && error_ != null && error_ != com.google.rpc.Status.getDefaultInstance()) { getErrorBuilder().mergeFrom(value); } else { error_ = value; } } else { errorBuilder_.mergeFrom(value); } if (error_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder clearError() { bitField0_ = (bitField0_ & ~0x00000002); error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.Status.Builder getErrorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getErrorFieldBuilder().getBuilder(); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { if (errorBuilder_ != null) { return errorBuilder_.getMessageOrBuilder(); } else { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorFieldBuilder() { if (errorBuilder_ == null) { errorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean()); error_ = null; } return errorBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo) private static final com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo 
DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo(); } public static com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> PARSER = new com.google.protobuf.AbstractParser<ImportAptArtifactsErrorInfo>() { @java.lang.Override public ImportAptArtifactsErrorInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportAptArtifactsErrorInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,425
java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ImportYumArtifactsErrorInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1/yum_artifact.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1; /** * * * <pre> * Error information explaining why a package was not imported. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo} */ public final class ImportYumArtifactsErrorInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) ImportYumArtifactsErrorInfoOrBuilder { private static final long serialVersionUID = 0L; // Use ImportYumArtifactsErrorInfo.newBuilder() to construct. 
private ImportYumArtifactsErrorInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportYumArtifactsErrorInfo() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportYumArtifactsErrorInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportYumArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo.Builder.class); } private int bitField0_; private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_SOURCE(1), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_SOURCE; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_SOURCE_FIELD_NUMBER = 1; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource getGcsSource() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.getDefaultInstance(); } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.getDefaultInstance(); } public static final int ERROR_FIELD_NUMBER = 2; private com.google.rpc.Status error_; /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. 
*/ @java.lang.Override public boolean hasError() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ @java.lang.Override public com.google.rpc.Status getError() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage( 1, (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getError()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getError()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo)) { return super.equals(obj); } 
com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo other = (com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) obj; if (hasError() != other.hasError()) return false; if (hasError()) { if (!getError().equals(other.getError())) return false; } if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsSource().equals(other.getGcsSource())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasError()) { hash = (37 * hash) + ERROR_FIELD_NUMBER; hash = (53 * hash) + getError().hashCode(); } switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getGcsSource().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Error information explaining why a package was not imported. 
* </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1_ImportYumArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo.Builder.class); } // Construct using // com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getErrorFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsSourceBuilder_ != null) { gcsSourceBuilder_.clear(); } error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.artifactregistry.v1.YumArtifactProto 
.internal_static_google_devtools_artifactregistry_v1_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo .getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo build() { com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo buildPartial() { com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo result = new com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.error_ = errorBuilder_ == null ? 
error_ : errorBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsSourceBuilder_ != null) { result.source_ = gcsSourceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) { return mergeFrom( (com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo other) { if (other == com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo .getDefaultInstance()) return this; if (other.hasError()) { mergeError(other.getError()); } switch (other.getSourceCase()) { case GCS_SOURCE: { mergeGcsSource(other.getGcsSource()); break; } case SOURCE_NOT_SET: { break; } } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSourceOrBuilder> gcsSourceBuilder_; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. 
*/ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource getGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource .getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsSourceBuilder_.getMessage(); } return com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsSourceBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.Builder builderForValue) { if (gcsSourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsSourceBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ public Builder mergeGcsSource( com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource .getDefaultInstance()) { source_ = com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.newBuilder( (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsSourceBuilder_.mergeFrom(value); } else { gcsSourceBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ public Builder clearGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsSourceBuilder_.clear(); } return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ public com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.Builder getGcsSourceBuilder() { return getGcsSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) { return gcsSourceBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource gcs_source = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSourceOrBuilder> getGcsSourceFieldBuilder() { if (gcsSourceBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource .getDefaultInstance(); } gcsSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSourceOrBuilder>( (com.google.devtools.artifactregistry.v1.ImportYumArtifactsGcsSource) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsSourceBuilder_; } private com.google.rpc.Status error_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorBuilder_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ public boolean hasError() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ public com.google.rpc.Status getError() { if (errorBuilder_ == null) { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } else { return errorBuilder_.getMessage(); } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } error_ = value; } else { errorBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status.Builder builderForValue) { if (errorBuilder_ == null) { error_ = builderForValue.build(); } else { errorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder mergeError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && error_ != null && error_ != com.google.rpc.Status.getDefaultInstance()) { getErrorBuilder().mergeFrom(value); } else { error_ = value; } } else { errorBuilder_.mergeFrom(value); } if (error_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder clearError() { bitField0_ = (bitField0_ & ~0x00000002); error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.Status.Builder getErrorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getErrorFieldBuilder().getBuilder(); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { if (errorBuilder_ != null) { return errorBuilder_.getMessageOrBuilder(); } else { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorFieldBuilder() { if (errorBuilder_ == null) { errorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean()); error_ = null; } return errorBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo) private static final com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo 
DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo(); } public static com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> PARSER = new com.google.protobuf.AbstractParser<ImportYumArtifactsErrorInfo>() { @java.lang.Override public ImportYumArtifactsErrorInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1.ImportYumArtifactsErrorInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,555
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/ExperimentErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/errors/experiment_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.errors; /** * <pre> * Container for enum describing possible experiment error. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.ExperimentErrorEnum} */ public final class ExperimentErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.ExperimentErrorEnum) ExperimentErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use ExperimentErrorEnum.newBuilder() to construct. private ExperimentErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExperimentErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ExperimentErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ExperimentErrorProto.internal_static_google_ads_googleads_v21_errors_ExperimentErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ExperimentErrorProto.internal_static_google_ads_googleads_v21_errors_ExperimentErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.ExperimentErrorEnum.class, com.google.ads.googleads.v21.errors.ExperimentErrorEnum.Builder.class); } /** * <pre> * Enum describing possible experiment errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v21.errors.ExperimentErrorEnum.ExperimentError} */ public enum ExperimentError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * The start date of an experiment cannot be set in the past. * Use a start date in the future. * </pre> * * <code>CANNOT_SET_START_DATE_IN_PAST = 2;</code> */ CANNOT_SET_START_DATE_IN_PAST(2), /** * <pre> * The end date of an experiment is before its start date. * Use an end date after the start date. * </pre> * * <code>END_DATE_BEFORE_START_DATE = 3;</code> */ END_DATE_BEFORE_START_DATE(3), /** * <pre> * The start date of an experiment is too far in the future. * Use a start date no more than 1 year in the future. * </pre> * * <code>START_DATE_TOO_FAR_IN_FUTURE = 4;</code> */ START_DATE_TOO_FAR_IN_FUTURE(4), /** * <pre> * The experiment has the same name as an existing active experiment. * </pre> * * <code>DUPLICATE_EXPERIMENT_NAME = 5;</code> */ DUPLICATE_EXPERIMENT_NAME(5), /** * <pre> * Experiments can only be modified when they are ENABLED. * </pre> * * <code>CANNOT_MODIFY_REMOVED_EXPERIMENT = 6;</code> */ CANNOT_MODIFY_REMOVED_EXPERIMENT(6), /** * <pre> * The start date of an experiment cannot be modified if the existing start * date has already passed. * </pre> * * <code>START_DATE_ALREADY_PASSED = 7;</code> */ START_DATE_ALREADY_PASSED(7), /** * <pre> * The end date of an experiment cannot be set in the past. * </pre> * * <code>CANNOT_SET_END_DATE_IN_PAST = 8;</code> */ CANNOT_SET_END_DATE_IN_PAST(8), /** * <pre> * The status of an experiment cannot be set to REMOVED. * </pre> * * <code>CANNOT_SET_STATUS_TO_REMOVED = 9;</code> */ CANNOT_SET_STATUS_TO_REMOVED(9), /** * <pre> * The end date of an expired experiment cannot be modified. * </pre> * * <code>CANNOT_MODIFY_PAST_END_DATE = 10;</code> */ CANNOT_MODIFY_PAST_END_DATE(10), /** * <pre> * The status is invalid. 
* </pre> * * <code>INVALID_STATUS = 11;</code> */ INVALID_STATUS(11), /** * <pre> * Experiment arm contains campaigns with invalid advertising channel type. * </pre> * * <code>INVALID_CAMPAIGN_CHANNEL_TYPE = 12;</code> */ INVALID_CAMPAIGN_CHANNEL_TYPE(12), /** * <pre> * A pair of trials share members and have overlapping date ranges. * </pre> * * <code>OVERLAPPING_MEMBERS_AND_DATE_RANGE = 13;</code> */ OVERLAPPING_MEMBERS_AND_DATE_RANGE(13), /** * <pre> * Experiment arm contains invalid traffic split. * </pre> * * <code>INVALID_TRIAL_ARM_TRAFFIC_SPLIT = 14;</code> */ INVALID_TRIAL_ARM_TRAFFIC_SPLIT(14), /** * <pre> * Experiment contains trial arms with overlapping traffic split. * </pre> * * <code>TRAFFIC_SPLIT_OVERLAPPING = 15;</code> */ TRAFFIC_SPLIT_OVERLAPPING(15), /** * <pre> * The total traffic split of trial arms is not equal to 100. * </pre> * * <code>SUM_TRIAL_ARM_TRAFFIC_UNEQUALS_TO_TRIAL_TRAFFIC_SPLIT_DENOMINATOR = 16;</code> */ SUM_TRIAL_ARM_TRAFFIC_UNEQUALS_TO_TRIAL_TRAFFIC_SPLIT_DENOMINATOR(16), /** * <pre> * Traffic split related settings (like traffic share bounds) can't be * modified after the experiment has started. * </pre> * * <code>CANNOT_MODIFY_TRAFFIC_SPLIT_AFTER_START = 17;</code> */ CANNOT_MODIFY_TRAFFIC_SPLIT_AFTER_START(17), /** * <pre> * The experiment could not be found. * </pre> * * <code>EXPERIMENT_NOT_FOUND = 18;</code> */ EXPERIMENT_NOT_FOUND(18), /** * <pre> * Experiment has not begun. * </pre> * * <code>EXPERIMENT_NOT_YET_STARTED = 19;</code> */ EXPERIMENT_NOT_YET_STARTED(19), /** * <pre> * The experiment cannot have more than one control arm. * </pre> * * <code>CANNOT_HAVE_MULTIPLE_CONTROL_ARMS = 20;</code> */ CANNOT_HAVE_MULTIPLE_CONTROL_ARMS(20), /** * <pre> * The experiment doesn't set in-design campaigns. * </pre> * * <code>IN_DESIGN_CAMPAIGNS_NOT_SET = 21;</code> */ IN_DESIGN_CAMPAIGNS_NOT_SET(21), /** * <pre> * Clients must use the graduate action to graduate experiments and cannot * set the status to GRADUATED directly. 
* </pre> * * <code>CANNOT_SET_STATUS_TO_GRADUATED = 22;</code> */ CANNOT_SET_STATUS_TO_GRADUATED(22), /** * <pre> * Cannot use shared budget on base campaign when scheduling an experiment. * </pre> * * <code>CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_SHARED_BUDGET = 23;</code> */ CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_SHARED_BUDGET(23), /** * <pre> * Cannot use custom budget on base campaign when scheduling an experiment. * </pre> * * <code>CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_CUSTOM_BUDGET = 24;</code> */ CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_CUSTOM_BUDGET(24), /** * <pre> * Invalid status transition. * </pre> * * <code>STATUS_TRANSITION_INVALID = 25;</code> */ STATUS_TRANSITION_INVALID(25), /** * <pre> * The experiment campaign name conflicts with a pre-existing campaign. * </pre> * * <code>DUPLICATE_EXPERIMENT_CAMPAIGN_NAME = 26;</code> */ DUPLICATE_EXPERIMENT_CAMPAIGN_NAME(26), /** * <pre> * Cannot remove in creation experiments. * </pre> * * <code>CANNOT_REMOVE_IN_CREATION_EXPERIMENT = 27;</code> */ CANNOT_REMOVE_IN_CREATION_EXPERIMENT(27), /** * <pre> * Cannot add campaign with deprecated ad types. Deprecated ad types: * ENHANCED_DISPLAY, GALLERY, GMAIL, KEYWORDLESS, TEXT. * </pre> * * <code>CANNOT_ADD_CAMPAIGN_WITH_DEPRECATED_AD_TYPES = 28;</code> */ CANNOT_ADD_CAMPAIGN_WITH_DEPRECATED_AD_TYPES(28), /** * <pre> * Sync can only be enabled for supported experiment types. Supported * experiment types: SEARCH_CUSTOM, DISPLAY_CUSTOM, * DISPLAY_AUTOMATED_BIDDING_STRATEGY, SEARCH_AUTOMATED_BIDDING_STRATEGY. * </pre> * * <code>CANNOT_ENABLE_SYNC_FOR_UNSUPPORTED_EXPERIMENT_TYPE = 29;</code> */ CANNOT_ENABLE_SYNC_FOR_UNSUPPORTED_EXPERIMENT_TYPE(29), /** * <pre> * Experiment length cannot be longer than max length. * </pre> * * <code>INVALID_DURATION_FOR_AN_EXPERIMENT = 30;</code> */ INVALID_DURATION_FOR_AN_EXPERIMENT(30), /** * <pre> * The experiment's campaigns must self-declare whether they contain * political advertising that targets the European Union. 
* </pre> * * <code>MISSING_EU_POLITICAL_ADVERTISING_SELF_DECLARATION = 31;</code> */ MISSING_EU_POLITICAL_ADVERTISING_SELF_DECLARATION(31), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * The start date of an experiment cannot be set in the past. * Use a start date in the future. * </pre> * * <code>CANNOT_SET_START_DATE_IN_PAST = 2;</code> */ public static final int CANNOT_SET_START_DATE_IN_PAST_VALUE = 2; /** * <pre> * The end date of an experiment is before its start date. * Use an end date after the start date. * </pre> * * <code>END_DATE_BEFORE_START_DATE = 3;</code> */ public static final int END_DATE_BEFORE_START_DATE_VALUE = 3; /** * <pre> * The start date of an experiment is too far in the future. * Use a start date no more than 1 year in the future. * </pre> * * <code>START_DATE_TOO_FAR_IN_FUTURE = 4;</code> */ public static final int START_DATE_TOO_FAR_IN_FUTURE_VALUE = 4; /** * <pre> * The experiment has the same name as an existing active experiment. * </pre> * * <code>DUPLICATE_EXPERIMENT_NAME = 5;</code> */ public static final int DUPLICATE_EXPERIMENT_NAME_VALUE = 5; /** * <pre> * Experiments can only be modified when they are ENABLED. * </pre> * * <code>CANNOT_MODIFY_REMOVED_EXPERIMENT = 6;</code> */ public static final int CANNOT_MODIFY_REMOVED_EXPERIMENT_VALUE = 6; /** * <pre> * The start date of an experiment cannot be modified if the existing start * date has already passed. * </pre> * * <code>START_DATE_ALREADY_PASSED = 7;</code> */ public static final int START_DATE_ALREADY_PASSED_VALUE = 7; /** * <pre> * The end date of an experiment cannot be set in the past. 
* </pre> * * <code>CANNOT_SET_END_DATE_IN_PAST = 8;</code> */ public static final int CANNOT_SET_END_DATE_IN_PAST_VALUE = 8; /** * <pre> * The status of an experiment cannot be set to REMOVED. * </pre> * * <code>CANNOT_SET_STATUS_TO_REMOVED = 9;</code> */ public static final int CANNOT_SET_STATUS_TO_REMOVED_VALUE = 9; /** * <pre> * The end date of an expired experiment cannot be modified. * </pre> * * <code>CANNOT_MODIFY_PAST_END_DATE = 10;</code> */ public static final int CANNOT_MODIFY_PAST_END_DATE_VALUE = 10; /** * <pre> * The status is invalid. * </pre> * * <code>INVALID_STATUS = 11;</code> */ public static final int INVALID_STATUS_VALUE = 11; /** * <pre> * Experiment arm contains campaigns with invalid advertising channel type. * </pre> * * <code>INVALID_CAMPAIGN_CHANNEL_TYPE = 12;</code> */ public static final int INVALID_CAMPAIGN_CHANNEL_TYPE_VALUE = 12; /** * <pre> * A pair of trials share members and have overlapping date ranges. * </pre> * * <code>OVERLAPPING_MEMBERS_AND_DATE_RANGE = 13;</code> */ public static final int OVERLAPPING_MEMBERS_AND_DATE_RANGE_VALUE = 13; /** * <pre> * Experiment arm contains invalid traffic split. * </pre> * * <code>INVALID_TRIAL_ARM_TRAFFIC_SPLIT = 14;</code> */ public static final int INVALID_TRIAL_ARM_TRAFFIC_SPLIT_VALUE = 14; /** * <pre> * Experiment contains trial arms with overlapping traffic split. * </pre> * * <code>TRAFFIC_SPLIT_OVERLAPPING = 15;</code> */ public static final int TRAFFIC_SPLIT_OVERLAPPING_VALUE = 15; /** * <pre> * The total traffic split of trial arms is not equal to 100. * </pre> * * <code>SUM_TRIAL_ARM_TRAFFIC_UNEQUALS_TO_TRIAL_TRAFFIC_SPLIT_DENOMINATOR = 16;</code> */ public static final int SUM_TRIAL_ARM_TRAFFIC_UNEQUALS_TO_TRIAL_TRAFFIC_SPLIT_DENOMINATOR_VALUE = 16; /** * <pre> * Traffic split related settings (like traffic share bounds) can't be * modified after the experiment has started. 
* </pre> * * <code>CANNOT_MODIFY_TRAFFIC_SPLIT_AFTER_START = 17;</code> */ public static final int CANNOT_MODIFY_TRAFFIC_SPLIT_AFTER_START_VALUE = 17; /** * <pre> * The experiment could not be found. * </pre> * * <code>EXPERIMENT_NOT_FOUND = 18;</code> */ public static final int EXPERIMENT_NOT_FOUND_VALUE = 18; /** * <pre> * Experiment has not begun. * </pre> * * <code>EXPERIMENT_NOT_YET_STARTED = 19;</code> */ public static final int EXPERIMENT_NOT_YET_STARTED_VALUE = 19; /** * <pre> * The experiment cannot have more than one control arm. * </pre> * * <code>CANNOT_HAVE_MULTIPLE_CONTROL_ARMS = 20;</code> */ public static final int CANNOT_HAVE_MULTIPLE_CONTROL_ARMS_VALUE = 20; /** * <pre> * The experiment doesn't set in-design campaigns. * </pre> * * <code>IN_DESIGN_CAMPAIGNS_NOT_SET = 21;</code> */ public static final int IN_DESIGN_CAMPAIGNS_NOT_SET_VALUE = 21; /** * <pre> * Clients must use the graduate action to graduate experiments and cannot * set the status to GRADUATED directly. * </pre> * * <code>CANNOT_SET_STATUS_TO_GRADUATED = 22;</code> */ public static final int CANNOT_SET_STATUS_TO_GRADUATED_VALUE = 22; /** * <pre> * Cannot use shared budget on base campaign when scheduling an experiment. * </pre> * * <code>CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_SHARED_BUDGET = 23;</code> */ public static final int CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_SHARED_BUDGET_VALUE = 23; /** * <pre> * Cannot use custom budget on base campaign when scheduling an experiment. * </pre> * * <code>CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_CUSTOM_BUDGET = 24;</code> */ public static final int CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_CUSTOM_BUDGET_VALUE = 24; /** * <pre> * Invalid status transition. * </pre> * * <code>STATUS_TRANSITION_INVALID = 25;</code> */ public static final int STATUS_TRANSITION_INVALID_VALUE = 25; /** * <pre> * The experiment campaign name conflicts with a pre-existing campaign. 
* </pre> * * <code>DUPLICATE_EXPERIMENT_CAMPAIGN_NAME = 26;</code> */ public static final int DUPLICATE_EXPERIMENT_CAMPAIGN_NAME_VALUE = 26; /** * <pre> * Cannot remove in creation experiments. * </pre> * * <code>CANNOT_REMOVE_IN_CREATION_EXPERIMENT = 27;</code> */ public static final int CANNOT_REMOVE_IN_CREATION_EXPERIMENT_VALUE = 27; /** * <pre> * Cannot add campaign with deprecated ad types. Deprecated ad types: * ENHANCED_DISPLAY, GALLERY, GMAIL, KEYWORDLESS, TEXT. * </pre> * * <code>CANNOT_ADD_CAMPAIGN_WITH_DEPRECATED_AD_TYPES = 28;</code> */ public static final int CANNOT_ADD_CAMPAIGN_WITH_DEPRECATED_AD_TYPES_VALUE = 28; /** * <pre> * Sync can only be enabled for supported experiment types. Supported * experiment types: SEARCH_CUSTOM, DISPLAY_CUSTOM, * DISPLAY_AUTOMATED_BIDDING_STRATEGY, SEARCH_AUTOMATED_BIDDING_STRATEGY. * </pre> * * <code>CANNOT_ENABLE_SYNC_FOR_UNSUPPORTED_EXPERIMENT_TYPE = 29;</code> */ public static final int CANNOT_ENABLE_SYNC_FOR_UNSUPPORTED_EXPERIMENT_TYPE_VALUE = 29; /** * <pre> * Experiment length cannot be longer than max length. * </pre> * * <code>INVALID_DURATION_FOR_AN_EXPERIMENT = 30;</code> */ public static final int INVALID_DURATION_FOR_AN_EXPERIMENT_VALUE = 30; /** * <pre> * The experiment's campaigns must self-declare whether they contain * political advertising that targets the European Union. * </pre> * * <code>MISSING_EU_POLITICAL_ADVERTISING_SELF_DECLARATION = 31;</code> */ public static final int MISSING_EU_POLITICAL_ADVERTISING_SELF_DECLARATION_VALUE = 31; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static ExperimentError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static ExperimentError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return CANNOT_SET_START_DATE_IN_PAST; case 3: return END_DATE_BEFORE_START_DATE; case 4: return START_DATE_TOO_FAR_IN_FUTURE; case 5: return DUPLICATE_EXPERIMENT_NAME; case 6: return CANNOT_MODIFY_REMOVED_EXPERIMENT; case 7: return START_DATE_ALREADY_PASSED; case 8: return CANNOT_SET_END_DATE_IN_PAST; case 9: return CANNOT_SET_STATUS_TO_REMOVED; case 10: return CANNOT_MODIFY_PAST_END_DATE; case 11: return INVALID_STATUS; case 12: return INVALID_CAMPAIGN_CHANNEL_TYPE; case 13: return OVERLAPPING_MEMBERS_AND_DATE_RANGE; case 14: return INVALID_TRIAL_ARM_TRAFFIC_SPLIT; case 15: return TRAFFIC_SPLIT_OVERLAPPING; case 16: return SUM_TRIAL_ARM_TRAFFIC_UNEQUALS_TO_TRIAL_TRAFFIC_SPLIT_DENOMINATOR; case 17: return CANNOT_MODIFY_TRAFFIC_SPLIT_AFTER_START; case 18: return EXPERIMENT_NOT_FOUND; case 19: return EXPERIMENT_NOT_YET_STARTED; case 20: return CANNOT_HAVE_MULTIPLE_CONTROL_ARMS; case 21: return IN_DESIGN_CAMPAIGNS_NOT_SET; case 22: return CANNOT_SET_STATUS_TO_GRADUATED; case 23: return CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_SHARED_BUDGET; case 24: return CANNOT_CREATE_EXPERIMENT_CAMPAIGN_WITH_CUSTOM_BUDGET; case 25: return STATUS_TRANSITION_INVALID; case 26: return DUPLICATE_EXPERIMENT_CAMPAIGN_NAME; case 27: return CANNOT_REMOVE_IN_CREATION_EXPERIMENT; case 28: return CANNOT_ADD_CAMPAIGN_WITH_DEPRECATED_AD_TYPES; case 29: return CANNOT_ENABLE_SYNC_FOR_UNSUPPORTED_EXPERIMENT_TYPE; case 30: return INVALID_DURATION_FOR_AN_EXPERIMENT; case 31: return MISSING_EU_POLITICAL_ADVERTISING_SELF_DECLARATION; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ExperimentError> 
internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< ExperimentError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ExperimentError>() { public ExperimentError findValueByNumber(int number) { return ExperimentError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ExperimentErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final ExperimentError[] VALUES = values(); public static ExperimentError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ExperimentError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.ExperimentErrorEnum.ExperimentError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size 
+= getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.errors.ExperimentErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v21.errors.ExperimentErrorEnum other = (com.google.ads.googleads.v21.errors.ExperimentErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.errors.ExperimentErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible experiment error. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.ExperimentErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.ExperimentErrorEnum) com.google.ads.googleads.v21.errors.ExperimentErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ExperimentErrorProto.internal_static_google_ads_googleads_v21_errors_ExperimentErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ExperimentErrorProto.internal_static_google_ads_googleads_v21_errors_ExperimentErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.ExperimentErrorEnum.class, com.google.ads.googleads.v21.errors.ExperimentErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v21.errors.ExperimentErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.ads.googleads.v21.errors.ExperimentErrorProto.internal_static_google_ads_googleads_v21_errors_ExperimentErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.errors.ExperimentErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v21.errors.ExperimentErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.errors.ExperimentErrorEnum build() { com.google.ads.googleads.v21.errors.ExperimentErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.errors.ExperimentErrorEnum buildPartial() { com.google.ads.googleads.v21.errors.ExperimentErrorEnum result = new com.google.ads.googleads.v21.errors.ExperimentErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.errors.ExperimentErrorEnum) { return 
mergeFrom((com.google.ads.googleads.v21.errors.ExperimentErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.errors.ExperimentErrorEnum other) { if (other == com.google.ads.googleads.v21.errors.ExperimentErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.ExperimentErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.ExperimentErrorEnum) private static final com.google.ads.googleads.v21.errors.ExperimentErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.ExperimentErrorEnum(); } public static com.google.ads.googleads.v21.errors.ExperimentErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } 
private static final com.google.protobuf.Parser<ExperimentErrorEnum> PARSER = new com.google.protobuf.AbstractParser<ExperimentErrorEnum>() { @java.lang.Override public ExperimentErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ExperimentErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExperimentErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.errors.ExperimentErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/solr
34,948
solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
import static org.apache.solr.common.params.SolrParams.wrapDefaults;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.SentinelIntSet;
import org.apache.solr.CursorPagingTest;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.CursorMarkParams;
import org.apache.solr.common.params.GroupParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.search.CursorMark;
import org.junit.Test;

/**
 * Distributed tests of deep paging using {@link CursorMark} and {@link
 * CursorMarkParams#CURSOR_MARK_PARAM}.
 *
 * <p>NOTE: this class reuses some utilities from {@link CursorPagingTest} that assume the same
 * schema and configs.
 *
 * @see CursorPagingTest
 */
@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM")
public class DistribCursorPagingTest extends AbstractFullDistribZkTestBase {

  public DistribCursorPagingTest() {
    // randomize whether sorted queries are backed by a filter, so both code paths get coverage
    System.setProperty(
        "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean()));
    // reuse the config/schema from the single-node CursorPagingTest so its helpers apply here
    configString = CursorPagingTest.TEST_SOLRCONFIG_NAME;
    schemaString = CursorPagingTest.TEST_SCHEMAXML_NAME;
  }

  @Override
  protected String getCloudSolrConfig() {
    return configString;
  }

  /**
   * A really obnoxious hack needed to get our elevate.xml into zk ... But simpler for now than
   * re-writing the whole test case using SolrCloudTestCase.
   */
  @Override
  public void distribSetUp() throws Exception {
    super.distribSetUp();
    ZkTestServer.putConfig("conf1", zkServer.getZkClient(), ZkTestServer.SOLRHOME, "elevate.xml");
  }

  /**
   * Single entry point so the three sub-tests share one cluster; the index is wiped between each
   * sub-test. On failure, the cluster layout is dumped during teardown to help debugging.
   */
  @Test
  public void test() throws Exception {
    boolean testFinished = false;
    try {
      handle.clear();
      handle.put("timestamp", SKIPVAL);

      doBadInputTest();
      del("*:*");
      commit();

      doSimpleTest();
      del("*:*");
      commit();

      doRandomSortsOnLargeIndex();
      del("*:*");
      commit();

      testFinished = true;
    } finally {
      // only print the ZK/cluster layout if something above failed
      if (!testFinished) {
        printLayoutOnTearDown = true;
      }
    }
  }

  /** Verifies that malformed cursor requests fail fast with BAD_REQUEST and a useful message. */
  private void doBadInputTest() throws Exception {
    // sometimes seed some data, other times use an empty index
    if (random().nextBoolean()) {
      indexDoc(sdoc("id", "42", "str", "z", "float", "99.99", "int", "42"));
      indexDoc(sdoc("id", "66", "str", "x", "float", "22.00", "int", "-66"));
    } else {
      del("*:*");
    }
    commit();

    // empty, blank, or bogus cursor
    for (String c : new String[] {"", " ", "all the docs please!"}) {
      assertFail(
          params("q", "*:*", "sort", "id desc", CURSOR_MARK_PARAM, c),
          ErrorCode.BAD_REQUEST,
          "Unable to parse");
    }

    // no id in sort (cursors require the uniqueKey field as a tie-breaker)
    assertFail(
        params("q", "*:*", "sort", "score desc", CURSOR_MARK_PARAM, CURSOR_MARK_START),
        ErrorCode.BAD_REQUEST,
        "uniqueKey field");
    // _docid_ pseudo-field is not allowed in a cursor sort
    assertFail(
        params("q", "*:*", "sort", "_docid_ asc, id desc", CURSOR_MARK_PARAM, CURSOR_MARK_START),
        ErrorCode.BAD_REQUEST,
        "_docid_");

    // using cursor w/ grouping
    assertFail(
        params(
            "q", "*:*",
            "sort", "id desc",
            GroupParams.GROUP, "true",
            GroupParams.GROUP_FIELD, "str",
            CURSOR_MARK_PARAM, CURSOR_MARK_START),
        ErrorCode.BAD_REQUEST,
        "Grouping");
  }

  /**
   * Deterministic walk of a tiny, hand-built index: exercises empty results, rows=0, several sort
   * combinations (including missing-value and function sorts), and index mutation mid-walk.
   */
  private void doSimpleTest() throws Exception {
    String cursorMark = CURSOR_MARK_START;
    SolrParams params = null;
    QueryResponse rsp = null;

    // randomly exercise either the indexed or docValues variant of the int field
    final String intsort = "int" + (random().nextBoolean() ? "" : "_dv");

    // trivial base case: ensure cursorMark against an empty index doesn't blow up
    cursorMark = CURSOR_MARK_START;
    params = params("q", "*:*", "rows", "4", "fl", "id", "sort", "id desc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(0, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(cursorMark, assertHashNextCursorMark(rsp));

    // don't add in order of either field to ensure we aren't inadvertently
    // counting on internal docid ordering
    indexDoc(sdoc("id", "9", "str", "c", "float", "-3.2", "int", "42"));
    indexDoc(sdoc("id", "7", "str", "c", "float", "-3.2", "int", "-1976"));
    indexDoc(sdoc("id", "2", "str", "c", "float", "-3.2", "int", "666"));
    indexDoc(sdoc("id", "0", "str", "b", "float", "64.5", "int", "-42"));
    indexDoc(sdoc("id", "5", "str", "b", "float", "64.5", "int", "2001"));
    indexDoc(sdoc("id", "8", "str", "b", "float", "64.5", "int", "4055"));
    indexDoc(sdoc("id", "6", "str", "a", "float", "64.5", "int", "7"));
    indexDoc(sdoc("id", "1", "str", "a", "float", "64.5", "int", "7"));
    indexDoc(sdoc("id", "4", "str", "a", "float", "11.1", "int", "6"));
    indexDoc(sdoc("id", "3", "str", "a", "float", "11.1")); // int is missing
    commit();

    // base case: ensure cursorMark that matches no docs doesn't blow up
    cursorMark = CURSOR_MARK_START;
    params = params("q", "id:9999999", "rows", "4", "fl", "id", "sort", "id desc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(0, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(cursorMark, assertHashNextCursorMark(rsp));

    // edge case: ensure rows=0 doesn't blow up and gives back same cursor for next
    cursorMark = CURSOR_MARK_START;
    params = params("q", "*:*", "rows", "0", "fl", "id", "sort", "id desc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(10, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(cursorMark, assertHashNextCursorMark(rsp));

    // simple id sort
    cursorMark = CURSOR_MARK_START;
    params = params("q", "-int:6", "rows", "4", "fl", "id", "sort", "id desc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(9, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 9, 8, 7, 6);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(9, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 5, 3, 2, 1);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(9, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 0);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end: empty page, cursor must not advance
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(9, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // NOTE: because field stats and queryNorms can vary amongst shards,
    // not all "obvious" score based sorts can be iterated cleanly.
    // queries that seem like they should result in an obvious "tie" score
    // between two documents (and would tie in a single node case) may actually
    // get diff scores for diff docs if they are on diff shards
    //
    // so here, in this test, we can't assert a hardcoded score ordering -- we trust
    // the full walk testing (below)

    // int sort with duplicates, id tiebreaker ... and some faceting
    cursorMark = CURSOR_MARK_START;
    params =
        params(
            "q", "-int:2001 -int:4055",
            "rows", "3",
            "fl", "id",
            "facet", "true",
            "facet.field", "str",
            "facet.mincount", "1",
            "json.nl", "map",
            "sort", intsort + " asc, id asc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 7, 0, 3);
    // facet counts must be over the full result set, not just the current page
    assertEquals(3, rsp.getFacetField("str").getValues().size());
    assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName());
    assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount());
    cursorMark = assertHashNextCursorMark(rsp);
    // next page: facets should stay identical
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 4, 1, 6);
    assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName());
    assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount());
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 9, 2);
    assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName());
    assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount());
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName());
    assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount());
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // int missing first sort with dups, id tie breaker
    cursorMark = CURSOR_MARK_START;
    params =
        params(
            "q", "-int:2001 -int:4055",
            "rows", "3",
            "fl", "id",
            "json.nl", "map",
            "sort", intsort + "_first asc, id asc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 3, 7, 0);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 4, 1, 6);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 9, 2);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // int missing last sort with dups, id tie breaker
    cursorMark = CURSOR_MARK_START;
    params =
        params(
            "q", "-int:2001 -int:4055",
            "rows", "3",
            "fl", "id",
            "json.nl", "map",
            "sort", intsort + "_last asc, id asc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 7, 0, 4);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 1, 6, 9);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 2, 3);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // string sort with dups, id tie breaker
    cursorMark = CURSOR_MARK_START;
    params = params("q", "*:*", "rows", "6", "fl", "id", "sort", "str asc, id desc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(10, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 6, 4, 3, 1, 8, 5);
    cursorMark = assertHashNextCursorMark(rsp);
    // next page
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(10, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 0, 9, 7, 2);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(10, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // tri-level sort with more dups of primary then fit on a page.
    // also a function based sort using a simple function(s) on same field
    // (order should be the same in all cases)
    for (String primarysort : new String[] {"float", "field('float')", "sum(float,42)"}) {
      cursorMark = CURSOR_MARK_START;
      params =
          params(
              "q", "*:*",
              "rows", "2",
              "fl", "id",
              "sort", primarysort + " asc, " + intsort + " desc, id desc");
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp, 2, 9);
      cursorMark = assertHashNextCursorMark(rsp);
      // next page
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp, 7, 4);
      cursorMark = assertHashNextCursorMark(rsp);
      // next page
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp, 3, 8);
      cursorMark = assertHashNextCursorMark(rsp);
      // next page
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp, 5, 6);
      cursorMark = assertHashNextCursorMark(rsp);
      // next page
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp, 1, 0);
      cursorMark = assertHashNextCursorMark(rsp);
      // we've exactly exhausted all the results, but solr had no way of know that
      rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
      assertNumFound(10, rsp);
      assertStartsAtBeginning(rsp);
      assertDocList(rsp);
      assertEquals(
          "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));
    }

    // trivial base case: rows bigger then number of matches
    cursorMark = CURSOR_MARK_START;
    params = params("q", "id:3 id:7", "rows", "111", "fl", "id", "sort", intsort + " asc, id asc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(2, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 7, 3);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(2, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));

    // sanity check our full walk method
    SentinelIntSet ids;
    ids =
        assertFullWalkNoDups(
            10,
            params(
                "q", "*:*",
                "rows", "4",
                "sort", "id desc"));
    assertEquals(10, ids.size());
    ids =
        assertFullWalkNoDups(
            9,
            params(
                "q", "*:*",
                "rows", "1",
                "fq", "-id:4",
                "sort", "id asc"));
    assertEquals(9, ids.size());
    assertFalse("matched on id:4 unexpectedly", ids.exists(4));
    ids =
        assertFullWalkNoDups(
            9,
            params(
                "q", "*:*",
                "rows", "3",
                "fq", "-id:6",
                "sort", "float desc, id asc, int asc"));
    assertEquals(9, ids.size());
    assertFalse("matched on id:6 unexpectedly", ids.exists(6));
    ids =
        assertFullWalkNoDups(
            9,
            params(
                "q", "float:[0 TO *] int:7 id:6",
                "rows", "3",
                "sort", "score desc, id desc"));
    assertEquals(7, ids.size());
    assertFalse("matched on id:9 unexpectedly", ids.exists(9));
    assertFalse("matched on id:7 unexpectedly", ids.exists(7));
    assertFalse("matched on id:2 unexpectedly", ids.exists(2));

    // strategically delete/add some docs in the middle of walking the cursor
    cursorMark = CURSOR_MARK_START;
    params = params("q", "*:*", "rows", "2", "fl", "id", "sort", "str asc, id asc");
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(10, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 1, 3);
    cursorMark = assertHashNextCursorMark(rsp);
    // delete the last guy we got
    del("id:3");
    commit();
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(9, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 4, 6);
    cursorMark = assertHashNextCursorMark(rsp);
    // delete the next guy we expect
    del("id:0");
    commit();
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 5, 8);
    cursorMark = assertHashNextCursorMark(rsp);
    // update a doc we've already seen so it repeats
    indexDoc(sdoc("id", "5", "str", "c"));
    commit();
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertNumFound(8, rsp);
    assertStartsAtBeginning(rsp);
    assertDocList(rsp, 2, 5);
    cursorMark = assertHashNextCursorMark(rsp);
    // update the next doc we expect so it's now in the past
    indexDoc(sdoc("id", "7", "str", "a"));
    commit();
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertDocList(rsp, 9);
    cursorMark = assertHashNextCursorMark(rsp);
    // past the end
    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
    assertDocList(rsp);
    assertEquals(
        "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp));
  }

  /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups */
  public void doRandomSortsOnLargeIndex() throws Exception {
    final Collection<String> allFieldNames = getAllSortFieldNames();

    final int numInitialDocs = TestUtil.nextInt(random(), 100, 200);
    final int totalDocs = atLeast(500);

    // start with a smallish number of documents, and test that we can do a full walk using a
    // sort on *every* field in the schema...
    indexDocs(
        IntStream.rangeClosed(1, numInitialDocs)
            .mapToObj(CursorPagingTest::buildRandomDocument)
            .iterator());
    commit();

    for (String f : allFieldNames) {
      for (String order : new String[] {" asc", " desc"}) {
        // always include the uniqueKey as the final tie-breaker (unless it's the primary sort)
        String sort = f + order + ("id".equals(f) ? "" : ", id" + order);
        String rows = "" + TestUtil.nextInt(random(), 13, 50);
        final SolrParams main = params("q", "*:*", "fl", "id," + f, "rows", rows, "sort", sort);
        final SentinelIntSet ids = assertFullWalkNoDups(numInitialDocs, main);
        assertEquals(numInitialDocs, ids.size());

        // same query, now with QEC ... verify we get all the same docs, but the (expected)
        // elevated docs are first...
        final SentinelIntSet elevated =
            assertFullWalkNoDupsElevated(
                wrapDefaults(
                    params(
                        "qt", "/elevate",
                        "fl", "id,[elevated]",
                        "forceElevation", "true",
                        "elevateIds", "50,20,80"),
                    main),
                ids);
        assertTrue(elevated.exists(50));
        assertTrue(elevated.exists(20));
        assertTrue(elevated.exists(80));
        assertEquals(3, elevated.size());
      }
    }

    // now add a lot more docs, and test a handful of randomized multi-level sorts
    indexDocs(
        IntStream.rangeClosed(numInitialDocs + 1, totalDocs)
            .mapToObj(CursorPagingTest::buildRandomDocument)
            .iterator());
    commit();

    final int numRandomSorts = atLeast(3);
    for (int i = 0; i < numRandomSorts; i++) {
      final String sort = CursorPagingTest.buildRandomSort(allFieldNames);
      final String rows = "" + TestUtil.nextInt(random(), 63, 113);
      final String fl = random().nextBoolean() ? "id" : "id,score";
      final boolean matchAll = random().nextBoolean();
      final String q = matchAll ? "*:*" : CursorPagingTest.buildRandomQuery();
      final SolrParams main =
          params(
              "q", q,
              "fl", fl,
              "rows", rows,
              "sort", sort);
      final SentinelIntSet ids =
          assertFullWalkNoDups(
              totalDocs,
              params(
                  "q", q,
                  "fl", fl,
                  "rows", rows,
                  "sort", sort));
      if (matchAll) {
        assertEquals(totalDocs, ids.size());
      }

      // same query, now with QEC ... verify we get all the same docs, but the (expected)
      // elevated docs are first...
      // first we have to build a set of ids to elevate, from the set of ids known to match
      // query...
      final int[] expectedElevated =
          CursorPagingTest.pickElevations(TestUtil.nextInt(random(), 3, 33), ids);
      final SentinelIntSet elevated =
          assertFullWalkNoDupsElevated(
              wrapDefaults(
                  params(
                      "qt", "/elevate",
                      "fl", fl + ",[elevated]",
                      // HACK: work around SOLR-15307... same results should match, just not same
                      // order
                      "sort", (sort.startsWith("score asc") ? "score desc, " + sort : sort),
                      "forceElevation", "true",
                      "elevateIds",
                      Arrays.stream(expectedElevated)
                          .mapToObj(String::valueOf)
                          .collect(Collectors.joining(","))),
                  main),
              ids);
      for (int expected : expectedElevated) {
        assertTrue(
            expected + " wasn't elevated even though it should have been",
            elevated.exists(expected));
      }
      assertEquals(expectedElevated.length, elevated.size());
    }
  }

  /**
   * Asks the LukeRequestHandler on the control client for a list of the fields in the schema and
   * then prunes that list down to just the fields that can be used for sorting, and returns them as
   * an immutable list in a deterministically random order.
   */
  private List<String> getAllSortFieldNames() throws SolrServerException, IOException {
    LukeRequest req = new LukeRequest("/admin/luke");
    req.setShowSchema(true);
    NamedList<Object> rsp = controlClient.request(req);
    @SuppressWarnings({"unchecked"})
    NamedList<Object> fields = (NamedList) ((NamedList) rsp.get("schema")).get("fields");
    ArrayList<String> names = new ArrayList<>(fields.size());
    for (Map.Entry<String, Object> item : fields) {
      names.add(item.getKey());
    }
    return CursorPagingTest.pruneAndDeterministicallySort(names);
  }

  /** execute a request, verify that we get an expected error */
  public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) throws Exception {
    try {
      ignoreException(expSubstr);
      query(p);
      fail("no exception matching expected: " + expCode.code + ": " + expSubstr);
    } catch (SolrException e) {
      assertEquals(expCode.code, e.code());
      assertTrue(
          "Expected substr not found: " + expSubstr + " <!< " + e.getMessage(),
          e.getMessage().contains(expSubstr));
    } finally {
      unIgnoreException(expSubstr);
    }
  }

  /**
   * Given a QueryResponse returned by SolrServer.query, asserts that the numFound on the doc list
   * matches the expectation
   *
   * @see org.apache.solr.client.solrj.SolrClient#query
   */
  private void assertNumFound(int expected, QueryResponse rsp) {
    assertEquals(expected, extractDocList(rsp).getNumFound());
  }

  /**
   * Given a QueryResponse returned by SolrServer.query, asserts that the start on the doc list
   * starts at the beginning
   *
   * @see org.apache.solr.client.solrj.SolrClient#query
   */
  private void assertStartsAtBeginning(QueryResponse rsp) {
    assertEquals(0, extractDocList(rsp).getStart());
  }

  /**
   * Given a QueryResponse returned by SolrServer.query, asserts that the "id" of the list of
   * documents returned matches the expected list (an empty call asserts an empty page)
   *
   * @see org.apache.solr.client.solrj.SolrClient#query
   */
  private void assertDocList(QueryResponse rsp, int... ids) {
    SolrDocumentList docs = extractDocList(rsp);
    assertEquals("Wrong number of docs in response", ids.length, docs.size());
    int i = 0;
    for (int id : ids) {
      assertEquals(rsp.toString(), "" + id, docs.get(i).get("id"));
      i++;
    }
  }

  /**
   * Given a QueryResponse returned by SolrServer.query, asserts that the response does include
   * {@link CursorMarkParams#CURSOR_MARK_NEXT} key and returns it
   *
   * @see org.apache.solr.client.solrj.SolrClient#query
   */
  private String assertHashNextCursorMark(QueryResponse rsp) {
    String r = rsp.getNextCursorMark();
    assertNotNull(CURSOR_MARK_NEXT + " is null/missing", r);
    return r;
  }

  /** Extracts the doc list from a response, asserting it exists. */
  private SolrDocumentList extractDocList(QueryResponse rsp) {
    SolrDocumentList docs = rsp.getResults();
    assertNotNull("docList is null", docs);
    return docs;
  }

  /**
   * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_NEXT} as long
   * as a non-0 number of docs are returned. This method records the set of all id's (must be
   * positive ints) encountered and throws an assertion failure if any id is encountered more than
   * once, or if an id is encountered which is not expected, or if an id is <code>[elevated]</code>
   * and comes "after" any ids which were not <code>[elevated]</code>
   *
   * @return set of all elevated ids encountered in the walk
   * @see #assertFullWalkNoDups(SolrParams,Consumer)
   */
  public SentinelIntSet assertFullWalkNoDupsElevated(
      final SolrParams params, final SentinelIntSet allExpected) throws Exception {
    final SentinelIntSet ids = new SentinelIntSet(allExpected.size(), -1);
    final SentinelIntSet idsElevated = new SentinelIntSet(32, -1);

    assertFullWalkNoDups(
        params,
        (doc) -> {
          final int id = Integer.parseInt(doc.get("id").toString());
          final boolean elevated =
              Boolean.parseBoolean(doc.getOrDefault("[elevated]", "false").toString());
          assertTrue(id + " is not expected to match query", allExpected.exists(id));
          if (ids.exists(id)) {
            // duplicate id: try to diagnose whether a shard inconsistency explains it before
            // giving up with a generic failure
            String msg = "walk already seen: " + id;
            try {
              try {
                queryAndCompareShards(params("distrib", "false", "q", "id:" + id));
              } catch (AssertionError ae) {
                throw new AssertionError(
                    msg + ", found shard inconsistency that would explain it...", ae);
              }
              final QueryResponse rsp = cloudClient.query(params("q", "id:" + id));
              throw new AssertionError(
                  msg + ", don't know why; q=id:" + id + " gives: " + rsp.toString());
            } catch (Exception e) {
              throw new AssertionError(msg + ", exception trying to fiture out why...", e);
            }
          }
          if (elevated) {
            // all elevated ids must come before any non-elevated id
            assertEquals(
                "id is elevated, but we've already seen non elevated ids: " + id,
                idsElevated.size(),
                ids.size());
            idsElevated.put(id);
          }
          ids.put(id);
        });
    assertEquals("total number of ids seen did not match expected", allExpected.size(), ids.size());

    return idsElevated;
  }

  /**
   * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_NEXT} as long
   * as a non-0 number of docs are returned. This method records the set of all id's (must be
   * positive ints) encountered and throws an assertion failure if any id is encountered more than
   * once, or if the set grows above maxSize
   *
   * <p>Note that this method explicitly uses the "cloudClient" for executing the queries, instead
   * of relying on the test infrastructure to execute the queries redundantly against both the cloud
   * client as well as a control client. This is because term stat differences in a sharded setup
   * can result in different scores for documents compared to the control index -- which can affect
   * the sorting in some cases and cause false negatives in the response comparisons (even if we
   * don't include "score" in the "fl")
   *
   * @return set of all ids encountered in the walk
   * @see #assertFullWalkNoDups(SolrParams,Consumer)
   */
  public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) throws Exception {
    final SentinelIntSet ids = new SentinelIntSet(maxSize, -1);
    assertFullWalkNoDups(
        params,
        (doc) -> {
          int id = Integer.parseInt(doc.getFieldValue("id").toString());
          if (ids.exists(id)) {
            // duplicate id: try to diagnose whether a shard inconsistency explains it before
            // giving up with a generic failure
            String msg = "walk already seen: " + id;
            try {
              try {
                queryAndCompareShards(params("distrib", "false", "q", "id:" + id));
              } catch (AssertionError ae) {
                throw new AssertionError(
                    msg + ", found shard inconsistency that would explain it...", ae);
              }
              final QueryResponse rsp = cloudClient.query(params("q", "id:" + id));
              throw new AssertionError(
                  msg + ", don't know why; q=id:" + id + " gives: " + rsp.toString());
            } catch (Exception e) {
              throw new AssertionError(msg + ", exception trying to fiture out why...", e);
            }
          }
          ids.put(id);
          assertFalse(
              "id set bigger then max allowed (" + maxSize + "): " + ids.size(),
              maxSize < ids.size());
        });
    return ids;
  }

  /**
   * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_NEXT} as long
   * as a non-0 number of docs are returned. This method does some basic validation of each
   * response, and then passes each doc encountered (in order returned) to the specified Consumer,
   * which may throw an assertion if there is a problem.
   *
   * <p>Note that this method explicitly uses the "cloudClient" for executing the queries, instead
   * of relying on the test infrastructure to execute the queries redundantly against both the cloud
   * client as well as a control client. This is because term stat differences in a sharded setup
   * can result in different scores for documents compared to the control index -- which can affect
   * the sorting in some cases and cause false negatives in the response comparisons (even if we
   * don't include "score" in the "fl")
   */
  public void assertFullWalkNoDups(SolrParams params, Consumer<SolrDocument> consumer)
      throws Exception {
    String cursorMark = CURSOR_MARK_START;
    int docsOnThisPage = Integer.MAX_VALUE;
    while (0 < docsOnThisPage) {
      final SolrParams p = p(params, CURSOR_MARK_PARAM, cursorMark);
      QueryResponse rsp = cloudClient.query(p);
      String nextCursorMark = assertHashNextCursorMark(rsp);
      SolrDocumentList docs = extractDocList(rsp);
      docsOnThisPage = docs.size();
      if (null != params.getInt(CommonParams.ROWS)) {
        int rows = params.getInt(CommonParams.ROWS);
        assertTrue(
            "Too many docs on this page: " + rows + " < " + docsOnThisPage,
            docsOnThisPage <= rows);
      }
      if (0 == docsOnThisPage) {
        // an exhausted cursor must keep returning the same mark
        assertEquals(
            "no more docs, but " + CURSOR_MARK_NEXT + " isn't same", cursorMark, nextCursorMark);
      }
      for (SolrDocument doc : docs) {
        consumer.accept(doc);
      }
      cursorMark = nextCursorMark;
    }
  }

  /** Wraps the given params as defaults under the extra key/value pairs. */
  private SolrParams p(SolrParams params, String... other) {
    SolrParams extras = params(other);
    return SolrParams.wrapDefaults(params, extras);
  }
}
googleapis/google-cloud-java
35,399
java-video-intelligence/proto-google-cloud-video-intelligence-v1/src/main/java/com/google/cloud/videointelligence/v1/PersonDetectionAnnotation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1; /** * * * <pre> * Person detection annotation per video. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.PersonDetectionAnnotation} */ public final class PersonDetectionAnnotation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.PersonDetectionAnnotation) PersonDetectionAnnotationOrBuilder { private static final long serialVersionUID = 0L; // Use PersonDetectionAnnotation.newBuilder() to construct. 
private PersonDetectionAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PersonDetectionAnnotation() { tracks_ = java.util.Collections.emptyList(); version_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PersonDetectionAnnotation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_PersonDetectionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_PersonDetectionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.class, com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.Builder.class); } public static final int TRACKS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.videointelligence.v1.Track> tracks_; /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.videointelligence.v1.Track> getTracksList() { return tracks_; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksOrBuilderList() { return tracks_; } /** * * * <pre> * The detected tracks of a person. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ @java.lang.Override public int getTracksCount() { return tracks_.size(); } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.Track getTracks(int index) { return tracks_.get(index); } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.TrackOrBuilder getTracksOrBuilder(int index) { return tracks_.get(index); } public static final int VERSION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The version. */ @java.lang.Override public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The bytes for version. 
*/ @java.lang.Override public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tracks_.size(); i++) { output.writeMessage(1, tracks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tracks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tracks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1.PersonDetectionAnnotation)) { return super.equals(obj); } com.google.cloud.videointelligence.v1.PersonDetectionAnnotation other = (com.google.cloud.videointelligence.v1.PersonDetectionAnnotation) obj; if (!getTracksList().equals(other.getTracksList())) return false; if (!getVersion().equals(other.getVersion())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTracksCount() > 0) { hash = (37 * hash) + TRACKS_FIELD_NUMBER; hash = (53 * hash) + getTracksList().hashCode(); } hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Person detection annotation per video. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.PersonDetectionAnnotation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.PersonDetectionAnnotation) com.google.cloud.videointelligence.v1.PersonDetectionAnnotationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_PersonDetectionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_PersonDetectionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.class, com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.Builder.class); } // Construct using com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tracksBuilder_ == null) { tracks_ = java.util.Collections.emptyList(); } else { tracks_ = null; tracksBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); version_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_PersonDetectionAnnotation_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1.PersonDetectionAnnotation getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1.PersonDetectionAnnotation build() { com.google.cloud.videointelligence.v1.PersonDetectionAnnotation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1.PersonDetectionAnnotation buildPartial() { com.google.cloud.videointelligence.v1.PersonDetectionAnnotation result = new com.google.cloud.videointelligence.v1.PersonDetectionAnnotation(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation result) { if (tracksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tracks_ = java.util.Collections.unmodifiableList(tracks_); bitField0_ = (bitField0_ & ~0x00000001); } result.tracks_ = tracks_; } else { result.tracks_ = tracksBuilder_.build(); } } private void buildPartial0( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.version_ = version_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } 
@java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1.PersonDetectionAnnotation) { return mergeFrom((com.google.cloud.videointelligence.v1.PersonDetectionAnnotation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1.PersonDetectionAnnotation other) { if (other == com.google.cloud.videointelligence.v1.PersonDetectionAnnotation.getDefaultInstance()) return this; if (tracksBuilder_ == null) { if (!other.tracks_.isEmpty()) { if (tracks_.isEmpty()) { tracks_ = other.tracks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTracksIsMutable(); tracks_.addAll(other.tracks_); } onChanged(); } } else { if (!other.tracks_.isEmpty()) { if (tracksBuilder_.isEmpty()) { tracksBuilder_.dispose(); tracksBuilder_ = null; tracks_ = other.tracks_; bitField0_ = (bitField0_ & ~0x00000001); tracksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTracksFieldBuilder() : null; } else { tracksBuilder_.addAllMessages(other.tracks_); } } } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.videointelligence.v1.Track m = input.readMessage( com.google.cloud.videointelligence.v1.Track.parser(), extensionRegistry); if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(m); } else { tracksBuilder_.addMessage(m); } break; } // case 10 case 18: { version_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.videointelligence.v1.Track> tracks_ = java.util.Collections.emptyList(); private void ensureTracksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tracks_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1.Track>(tracks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder> tracksBuilder_; /** * * * <pre> * 
The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.Track> getTracksList() { if (tracksBuilder_ == null) { return java.util.Collections.unmodifiableList(tracks_); } else { return tracksBuilder_.getMessageList(); } } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public int getTracksCount() { if (tracksBuilder_ == null) { return tracks_.size(); } else { return tracksBuilder_.getCount(); } } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public com.google.cloud.videointelligence.v1.Track getTracks(int index) { if (tracksBuilder_ == null) { return tracks_.get(index); } else { return tracksBuilder_.getMessage(index); } } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder setTracks(int index, com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.set(index, value); onChanged(); } else { tracksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder setTracks( int index, com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.set(index, builderForValue.build()); onChanged(); } else { tracksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The detected tracks of a person. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder addTracks(com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.add(value); onChanged(); } else { tracksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder addTracks(int index, com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.add(index, value); onChanged(); } else { tracksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder addTracks(com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(builderForValue.build()); onChanged(); } else { tracksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder addTracks( int index, com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(index, builderForValue.build()); onChanged(); } else { tracksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder addAllTracks( java.lang.Iterable<? 
extends com.google.cloud.videointelligence.v1.Track> values) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tracks_); onChanged(); } else { tracksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder clearTracks() { if (tracksBuilder_ == null) { tracks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tracksBuilder_.clear(); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public Builder removeTracks(int index) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.remove(index); onChanged(); } else { tracksBuilder_.remove(index); } return this; } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder getTracksBuilder(int index) { return getTracksFieldBuilder().getBuilder(index); } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public com.google.cloud.videointelligence.v1.TrackOrBuilder getTracksOrBuilder(int index) { if (tracksBuilder_ == null) { return tracks_.get(index); } else { return tracksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public java.util.List<? 
extends com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksOrBuilderList() { if (tracksBuilder_ != null) { return tracksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tracks_); } } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder addTracksBuilder() { return getTracksFieldBuilder() .addBuilder(com.google.cloud.videointelligence.v1.Track.getDefaultInstance()); } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder addTracksBuilder(int index) { return getTracksFieldBuilder() .addBuilder(index, com.google.cloud.videointelligence.v1.Track.getDefaultInstance()); } /** * * * <pre> * The detected tracks of a person. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 1;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.Track.Builder> getTracksBuilderList() { return getTracksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksFieldBuilder() { if (tracksBuilder_ == null) { tracksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder>( tracks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tracks_ = null; } return tracksBuilder_; } private java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The version. 
*/ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The bytes for version. */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return This builder for chaining. */ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @param value The bytes for version to set. * @return This builder for chaining. 
*/ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.PersonDetectionAnnotation) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.PersonDetectionAnnotation) private static final com.google.cloud.videointelligence.v1.PersonDetectionAnnotation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.PersonDetectionAnnotation(); } public static com.google.cloud.videointelligence.v1.PersonDetectionAnnotation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PersonDetectionAnnotation> PARSER = new com.google.protobuf.AbstractParser<PersonDetectionAnnotation>() { @java.lang.Override public PersonDetectionAnnotation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PersonDetectionAnnotation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PersonDetectionAnnotation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1.PersonDetectionAnnotation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
hibernate/hibernate-search
34,069
integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/sort/FieldSortTypeCheckingAndConversionIT.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.search.integrationtest.backend.tck.search.sort; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatQuery; import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; import org.hibernate.search.engine.backend.common.DocumentReference; import org.hibernate.search.engine.backend.document.DocumentElement; import org.hibernate.search.engine.backend.document.IndexObjectFieldReference; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaObjectField; import org.hibernate.search.engine.backend.types.Aggregable; import org.hibernate.search.engine.backend.types.ObjectStructure; import org.hibernate.search.engine.backend.types.Projectable; import org.hibernate.search.engine.backend.types.Searchable; import org.hibernate.search.engine.backend.types.Sortable; import org.hibernate.search.engine.backend.types.dsl.StandardIndexFieldTypeOptionsStep; import org.hibernate.search.engine.reporting.spi.EventContexts; import org.hibernate.search.engine.search.common.ValueModel; import org.hibernate.search.engine.search.query.SearchQuery; import org.hibernate.search.engine.search.sort.dsl.SearchSortFactory; import org.hibernate.search.engine.search.sort.dsl.SortFinalStep; import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor; import org.hibernate.search.integrationtest.backend.tck.testsupport.types.NormalizedStringFieldTypeDescriptor; import 
org.hibernate.search.integrationtest.backend.tck.testsupport.types.StandardFieldTypeDescriptor; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.InvalidType; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModel; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.TckConfiguration; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.ValueWrapper; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.extension.SearchSetupHelper; import org.hibernate.search.util.common.SearchException; import org.hibernate.search.util.impl.integrationtest.common.reporting.FailureReportUtils; import org.hibernate.search.util.impl.integrationtest.mapper.stub.BulkIndexer; import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex; import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappingScope; import org.hibernate.search.util.impl.test.annotation.TestForIssue; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; /** * Tests behavior related to type checking and type conversion of DSL arguments * for sorts by field value. */ class FieldSortTypeCheckingAndConversionIT<F> { private static final List<StandardFieldTypeDescriptor<?>> supportedFieldTypes = new ArrayList<>(); private static final List<Arguments> parameters = new ArrayList<>(); static { for ( StandardFieldTypeDescriptor<?> fieldType : FieldTypeDescriptor.getAllStandard() ) { if ( fieldType.isFieldSortSupported() ) { supportedFieldTypes.add( fieldType ); parameters.add( Arguments.of( fieldType ) ); } } } public static List<? 
extends Arguments> params() { return parameters; } private static final String DOCUMENT_1 = "1"; private static final String DOCUMENT_2 = "2"; private static final String DOCUMENT_3 = "3"; private static final String EMPTY = "empty"; private static final String COMPATIBLE_INDEX_DOCUMENT_1 = "compatible_1"; private static final String RAW_FIELD_COMPATIBLE_INDEX_DOCUMENT_1 = "raw_field_compatible_1"; private static final String MISSING_FIELD_INDEX_DOCUMENT_1 = "missing_field_1"; private static final int BEFORE_DOCUMENT_1_ORDINAL = 0; private static final int DOCUMENT_1_ORDINAL = 1; private static final int BETWEEN_DOCUMENT_1_AND_2_ORDINAL = 2; private static final int DOCUMENT_2_ORDINAL = 3; private static final int BETWEEN_DOCUMENT_2_AND_3_ORDINAL = 4; private static final int DOCUMENT_3_ORDINAL = 5; private static final int AFTER_DOCUMENT_3_ORDINAL = 6; @RegisterExtension public static SearchSetupHelper setupHelper = SearchSetupHelper.create(); private static final SimpleMappedIndex<IndexBinding> mainIndex = SimpleMappedIndex.of( IndexBinding::new ).name( "main" ); private static final SimpleMappedIndex<CompatibleIndexBinding> compatibleIndex = SimpleMappedIndex.of( CompatibleIndexBinding::new ).name( "compatible" ); private static final SimpleMappedIndex<RawFieldCompatibleIndexBinding> rawFieldCompatibleIndex = SimpleMappedIndex.of( RawFieldCompatibleIndexBinding::new ).name( "rawFieldCompatible" ); private static final SimpleMappedIndex<MissingFieldIndexBinding> missingFieldIndex = SimpleMappedIndex.of( MissingFieldIndexBinding::new ).name( "missingField" ); private static final SimpleMappedIndex<IncompatibleIndexBinding> incompatibleIndex = SimpleMappedIndex.of( IncompatibleIndexBinding::new ).name( "incompatible" ); @BeforeAll static void setup() { setupHelper.start() .withIndexes( mainIndex, compatibleIndex, rawFieldCompatibleIndex, missingFieldIndex, incompatibleIndex ) .setup(); initData(); } @ParameterizedTest(name = "{0}") @MethodSource("params") void 
withDslConverters_dslConverterEnabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { SearchQuery<DocumentReference> query; String fieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( new ValueWrapper<>( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ) ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), EMPTY, DOCUMENT_1, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( new ValueWrapper<>( getSingleValueForMissingUse( BETWEEN_DOCUMENT_1_AND_2_ORDINAL, fieldTypeDescriptor ) ) ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, EMPTY, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( new ValueWrapper<>( getSingleValueForMissingUse( BETWEEN_DOCUMENT_2_AND_3_ORDINAL, fieldTypeDescriptor ) ) ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, EMPTY, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( new ValueWrapper<>( getSingleValueForMissingUse( AFTER_DOCUMENT_3_ORDINAL, fieldTypeDescriptor ) ) ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, DOCUMENT_3, EMPTY ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void withDslConverters_dslConverterDisabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { SearchQuery<DocumentReference> query; String fieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), EMPTY, DOCUMENT_1, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath 
).asc().missing() .use( getSingleValueForMissingUse( BETWEEN_DOCUMENT_1_AND_2_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, EMPTY, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BETWEEN_DOCUMENT_2_AND_3_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, EMPTY, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( AFTER_DOCUMENT_3_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, DOCUMENT_3, EMPTY ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void withDslConverters_dslConverterIndex(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { SearchQuery<DocumentReference> query; String fieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), EMPTY, DOCUMENT_1, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BETWEEN_DOCUMENT_1_AND_2_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, EMPTY, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BETWEEN_DOCUMENT_2_AND_3_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, EMPTY, DOCUMENT_3 ); query = 
matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( AFTER_DOCUMENT_3_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, DOCUMENT_3, EMPTY ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void withDslConverters_dslConverterRaw(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { SearchQuery<DocumentReference> query; String fieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); BiFunction<FieldTypeDescriptor<F, ?>, F, Object> toRawValue = TckConfiguration.get().getBackendFeatures()::toSortRawValue; query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( toRawValue.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ), ValueModel.RAW ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), EMPTY, DOCUMENT_1, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( toRawValue.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BETWEEN_DOCUMENT_1_AND_2_ORDINAL, fieldTypeDescriptor ) ), ValueModel.RAW ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, EMPTY, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( toRawValue.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BETWEEN_DOCUMENT_2_AND_3_ORDINAL, fieldTypeDescriptor ) ), ValueModel.RAW ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, EMPTY, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( toRawValue.apply( fieldTypeDescriptor, getSingleValueForMissingUse( AFTER_DOCUMENT_3_ORDINAL, fieldTypeDescriptor ) ), ValueModel.RAW ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, DOCUMENT_3, EMPTY ); } 
@ParameterizedTest(name = "{0}") @MethodSource("params") void withDslConverters_dslConverterString(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { SearchQuery<DocumentReference> query; String fieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); BiFunction<FieldTypeDescriptor<F, ?>, F, String> format = TckConfiguration.get().getBackendFeatures()::toStringValue; query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( format.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ), ValueModel.STRING ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), EMPTY, DOCUMENT_1, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( format.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BETWEEN_DOCUMENT_1_AND_2_ORDINAL, fieldTypeDescriptor ) ), ValueModel.STRING ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, EMPTY, DOCUMENT_2, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( format.apply( fieldTypeDescriptor, getSingleValueForMissingUse( BETWEEN_DOCUMENT_2_AND_3_ORDINAL, fieldTypeDescriptor ) ), ValueModel.STRING ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, EMPTY, DOCUMENT_3 ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( format.apply( fieldTypeDescriptor, getSingleValueForMissingUse( AFTER_DOCUMENT_3_ORDINAL, fieldTypeDescriptor ) ), ValueModel.STRING ) ); assertThatQuery( query ) .hasDocRefHitsExactOrder( mainIndex.typeName(), DOCUMENT_1, DOCUMENT_2, DOCUMENT_3, EMPTY ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void invalidType_noDslConverter(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope(); String absoluteFieldPath = getFieldPath( fieldTypeDescriptor ); Object invalidValueToMatch = new 
InvalidType(); assertThatThrownBy( () -> scope.sort().field( absoluteFieldPath ).missing() .use( invalidValueToMatch ), "field() sort with invalid parameter type for missing().use() on field " + absoluteFieldPath ) .isInstanceOf( SearchException.class ) .hasMessageContaining( "Unable to convert DSL argument: " ) .hasMessageContaining( InvalidType.class.getName() ) .hasCauseInstanceOf( ClassCastException.class ) .satisfies( FailureReportUtils.hasContext( EventContexts.fromIndexFieldAbsolutePath( absoluteFieldPath ) ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void invalidType_withDslConverter(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope(); String absoluteFieldPath = getFieldWithDslConverterPath( fieldTypeDescriptor ); Object invalidValueToMatch = new InvalidType(); assertThatThrownBy( () -> scope.sort().field( absoluteFieldPath ).missing() .use( invalidValueToMatch ), "field() sort with invalid parameter type for missing().use() on field " + absoluteFieldPath ) .isInstanceOf( SearchException.class ) .hasMessageContaining( "Unable to convert DSL argument: " ) .hasMessageContaining( InvalidType.class.getName() ) .hasCauseInstanceOf( ClassCastException.class ) .satisfies( FailureReportUtils.hasContext( EventContexts.fromIndexFieldAbsolutePath( absoluteFieldPath ) ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void multiIndex_withCompatibleIndex_usingField(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope( compatibleIndex ); SearchQuery<DocumentReference> query; String fieldPath = getFieldPath( fieldTypeDescriptor ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ), scope ); /* * Not testing the ordering of results here because some documents have the same value. 
* It's not what we want to test anyway: we just want to check that fields are correctly * detected as compatible and that no exception is thrown. */ assertThatQuery( query ).hasDocRefHitsAnyOrder( b -> { b.doc( mainIndex.typeName(), EMPTY ); b.doc( mainIndex.typeName(), DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_2 ); b.doc( mainIndex.typeName(), DOCUMENT_3 ); b.doc( compatibleIndex.typeName(), COMPATIBLE_INDEX_DOCUMENT_1 ); } ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void multiIndex_withRawFieldCompatibleIndex_dslConverterEnabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope( rawFieldCompatibleIndex ); String fieldPath = getFieldPath( fieldTypeDescriptor ); assertThatThrownBy( () -> { matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( new ValueWrapper<>( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ) ), scope ); } ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Inconsistent configuration for field '" + fieldPath + "' in a search query across multiple indexes", "Attribute 'mappingDslConverter' differs", " vs. " ) .satisfies( FailureReportUtils.hasContext( EventContexts.fromIndexNames( mainIndex.name(), rawFieldCompatibleIndex.name() ) ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void multiIndex_withRawFieldCompatibleIndex_dslConverterDisabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope( rawFieldCompatibleIndex ); SearchQuery<DocumentReference> query; String fieldPath = getFieldPath( fieldTypeDescriptor ); query = matchAllQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ), scope ); /* * Not testing the ordering of results here because some documents have the same value. 
* It's not what we want to test anyway: we just want to check that fields are correctly * detected as compatible and that no exception is thrown. */ assertThatQuery( query ).hasDocRefHitsAnyOrder( b -> { b.doc( mainIndex.typeName(), EMPTY ); b.doc( mainIndex.typeName(), DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_2 ); b.doc( mainIndex.typeName(), DOCUMENT_3 ); b.doc( rawFieldCompatibleIndex.typeName(), RAW_FIELD_COMPATIBLE_INDEX_DOCUMENT_1 ); } ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-4173") void multiIndex_withMissingFieldIndex_dslConverterEnabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { assumeTrue( TckConfiguration.get().getBackendFeatures() .supportsFieldSortWhenFieldMissingInSomeTargetIndexes( fieldTypeDescriptor.getJavaType() ), "This backend doesn't support sorts on a field of type '" + fieldTypeDescriptor + "' that is missing from some of the target indexes." ); StubMappingScope scope = mainIndex.createScope( missingFieldIndex ); SearchQuery<DocumentReference> query; String fieldPath = getFieldPath( fieldTypeDescriptor ); query = matchNonEmptyQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ), scope ); /* * Not testing the ordering of results here because it's not what we are interested in: * we just want to check that fields are correctly detected as compatible, * that no exception is thrown and that the query is correctly executed on all indexes * with no silent error (HSEARCH-4173). 
*/ assertThatQuery( query ).hasDocRefHitsAnyOrder( b -> { b.doc( missingFieldIndex.typeName(), MISSING_FIELD_INDEX_DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_2 ); b.doc( mainIndex.typeName(), DOCUMENT_3 ); } ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-4173") void multiIndex_withMissingFieldIndex_dslConverterDisabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { assumeTrue( TckConfiguration.get().getBackendFeatures() .supportsFieldSortWhenFieldMissingInSomeTargetIndexes( fieldTypeDescriptor.getJavaType() ), "This backend doesn't support sorts on a field of type '" + fieldTypeDescriptor + "' that is missing from some of the target indexes." ); StubMappingScope scope = mainIndex.createScope( missingFieldIndex ); SearchQuery<DocumentReference> query; String fieldPath = getFieldPath( fieldTypeDescriptor ); query = matchNonEmptyQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ), ValueModel.INDEX ), scope ); /* * Not testing the ordering of results here because it's not what we are interested in: * we just want to check that fields are correctly detected as compatible, * that no exception is thrown and that the query is correctly executed on all indexes * with no silent error (HSEARCH-4173). */ assertThatQuery( query ).hasDocRefHitsAnyOrder( b -> { b.doc( missingFieldIndex.typeName(), MISSING_FIELD_INDEX_DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_2 ); b.doc( mainIndex.typeName(), DOCUMENT_3 ); } ); } /** * Test the behavior when even the <strong>parent</strong> field of the field to sort on is missing, * and that parent field is <strong>nested</strong> in the main index. 
*/ @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-4173") void multiIndex_withMissingFieldIndex_nested(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { assumeTrue( TckConfiguration.get().getBackendFeatures() .supportsFieldSortWhenFieldMissingInSomeTargetIndexes( fieldTypeDescriptor.getJavaType() ), "This backend doesn't support sorts on a field of type '" + fieldTypeDescriptor + "' that is missing from some of the target indexes." ); StubMappingScope scope = mainIndex.createScope( missingFieldIndex ); SearchQuery<DocumentReference> query; String fieldPath = getFieldInNestedPath( fieldTypeDescriptor ); query = matchNonEmptyQuery( f -> f.field( fieldPath ).asc().missing() .use( getSingleValueForMissingUse( BEFORE_DOCUMENT_1_ORDINAL, fieldTypeDescriptor ) ), scope ); /* * Not testing the ordering of results here because it's not what we are interested in: * we just want to check that fields are correctly detected as compatible, * that no exception is thrown and that the query is correctly executed on all indexes * with no silent error (HSEARCH-4173). 
*/ assertThatQuery( query ).hasDocRefHitsAnyOrder( b -> { b.doc( missingFieldIndex.typeName(), MISSING_FIELD_INDEX_DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_1 ); b.doc( mainIndex.typeName(), DOCUMENT_2 ); b.doc( mainIndex.typeName(), DOCUMENT_3 ); } ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void multiIndex_withIncompatibleIndex_dslConverterEnabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope( incompatibleIndex ); String fieldPath = getFieldPath( fieldTypeDescriptor ); assertThatThrownBy( () -> { matchAllQuery( f -> f.field( fieldPath ), scope ); } ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Inconsistent configuration for field '" + fieldPath + "' in a search query across multiple indexes", "Inconsistent support for 'sort:field'" ) .satisfies( FailureReportUtils.hasContext( EventContexts.fromIndexNames( mainIndex.name(), incompatibleIndex.name() ) ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void multiIndex_withIncompatibleIndex_dslConverterDisabled(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { StubMappingScope scope = mainIndex.createScope( incompatibleIndex ); String fieldPath = getFieldPath( fieldTypeDescriptor ); assertThatThrownBy( () -> { matchAllQuery( f -> f.field( fieldPath ), scope ); } ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Inconsistent configuration for field '" + fieldPath + "' in a search query across multiple indexes", "Inconsistent support for 'sort:field'" ) .satisfies( FailureReportUtils.hasContext( EventContexts.fromIndexNames( mainIndex.name(), incompatibleIndex.name() ) ) ); } private SearchQuery<DocumentReference> matchAllQuery( Function<? super SearchSortFactory, ? extends SortFinalStep> sortContributor) { return matchAllQuery( sortContributor, mainIndex.createScope() ); } private SearchQuery<DocumentReference> matchAllQuery( Function<? super SearchSortFactory, ? 
extends SortFinalStep> sortContributor, StubMappingScope scope) { return scope.query() .where( f -> f.matchAll() ) .sort( sortContributor ) .toQuery(); } private SearchQuery<DocumentReference> matchNonEmptyQuery( Function<? super SearchSortFactory, ? extends SortFinalStep> sortContributor, StubMappingScope scope) { return scope.query() .where( f -> f.matchAll().except( f.id().matching( EMPTY ) ) ) .sort( sortContributor ) .toQuery(); } private String getFieldPath(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { return mainIndex.binding().fieldModels.get( fieldTypeDescriptor ).relativeFieldName; } private String getFieldInNestedPath(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { return mainIndex.binding().nested.relativeFieldName + '.' + mainIndex.binding().nested.fieldModels.get( fieldTypeDescriptor ).relativeFieldName; } private String getFieldWithDslConverterPath(StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { return mainIndex.binding().fieldWithDslConverterModels.get( fieldTypeDescriptor ).relativeFieldName; } private static void initDocument(IndexBinding indexBinding, DocumentElement document, Integer ordinal) { indexBinding.fieldModels.forEach( fieldModel -> addValue( fieldModel, document, ordinal ) ); indexBinding.fieldWithDslConverterModels.forEach( fieldModel -> addValue( fieldModel, document, ordinal ) ); DocumentElement nested = document.addObject( indexBinding.nested.self ); indexBinding.nested.fieldModels.forEach( fieldModel -> addValue( fieldModel, nested, ordinal ) ); indexBinding.nested.fieldWithDslConverterModels.forEach( fieldModel -> addValue( fieldModel, nested, ordinal ) ); } @SuppressWarnings("unchecked") private F getSingleValueForMissingUse(int ordinal, StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { F value = fieldTypeDescriptor.getAscendingUniqueTermValues().getSingle().get( ordinal ); if ( fieldTypeDescriptor instanceof NormalizedStringFieldTypeDescriptor && 
!TckConfiguration.get().getBackendFeatures().normalizesStringMissingValues() ) { // The backend doesn't normalize missing value replacements automatically, we have to do it ourselves // TODO HSEARCH-3387 Remove this once all backends correctly normalize missing value replacements value = (F) ( (String) value ).toLowerCase( Locale.ROOT ); } return value; } private String getSingleValueForMissingUseParse(int ordinal, StandardFieldTypeDescriptor<F> fieldTypeDescriptor) { return Objects.toString( getSingleValueForMissingUse( ordinal, fieldTypeDescriptor ) ); } private static <F> void addValue(SimpleFieldModel<F> fieldModel, DocumentElement documentElement, Integer ordinal) { if ( ordinal == null ) { return; } documentElement.addValue( fieldModel.reference, fieldModel.typeDescriptor.getAscendingUniqueTermValues().getSingle().get( ordinal ) ); } private static void initData() { BulkIndexer mainIndexer = mainIndex.bulkIndexer() // Important: do not index the documents in the expected order after sorts (1, 2, 3) .add( DOCUMENT_2, document -> initDocument( mainIndex.binding(), document, DOCUMENT_2_ORDINAL ) ) .add( EMPTY, document -> initDocument( mainIndex.binding(), document, null ) ) .add( DOCUMENT_1, document -> initDocument( mainIndex.binding(), document, DOCUMENT_1_ORDINAL ) ) .add( DOCUMENT_3, document -> initDocument( mainIndex.binding(), document, DOCUMENT_3_ORDINAL ) ); BulkIndexer compatibleIndexer = compatibleIndex.bulkIndexer() .add( COMPATIBLE_INDEX_DOCUMENT_1, document -> { CompatibleIndexBinding binding = compatibleIndex.binding(); binding.fieldModels.forEach( fieldModel -> addValue( fieldModel, document, DOCUMENT_1_ORDINAL ) ); binding.fieldWithDslConverterModels .forEach( fieldModel -> addValue( fieldModel, document, DOCUMENT_1_ORDINAL ) ); } ); BulkIndexer rawFieldCompatibleIndexer = rawFieldCompatibleIndex.bulkIndexer() .add( RAW_FIELD_COMPATIBLE_INDEX_DOCUMENT_1, document -> initDocument( rawFieldCompatibleIndex.binding(), document, DOCUMENT_1_ORDINAL ) 
); BulkIndexer missingFieldIndexer = missingFieldIndex.bulkIndexer() .add( MISSING_FIELD_INDEX_DOCUMENT_1, document -> {} ); mainIndexer.join( compatibleIndexer, rawFieldCompatibleIndexer, missingFieldIndexer ); } private static class AbstractObjectMapping { final SimpleFieldModelsByType fieldModels; final SimpleFieldModelsByType fieldWithDslConverterModels; AbstractObjectMapping(IndexSchemaElement root, Consumer<StandardIndexFieldTypeOptionsStep<?, ?>> additionalConfiguration) { fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "", c -> c.sortable( Sortable.YES ), additionalConfiguration ); fieldWithDslConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "converted_", c -> c.sortable( Sortable.YES ), additionalConfiguration.andThen( c -> c.dslConverter( ValueWrapper.class, ValueWrapper.toDocumentValueConverter() ) ) ); } } private static class IndexBinding extends AbstractObjectMapping { final FirstLevelObjectMapping nested; IndexBinding(IndexSchemaElement root) { this( root, ignored -> {} ); } IndexBinding(IndexSchemaElement root, Consumer<StandardIndexFieldTypeOptionsStep<?, ?>> additionalConfiguration) { super( root, additionalConfiguration ); nested = FirstLevelObjectMapping.create( root, "nested", ObjectStructure.NESTED, additionalConfiguration ); } } private static class FirstLevelObjectMapping extends AbstractObjectMapping { final String relativeFieldName; final IndexObjectFieldReference self; public static FirstLevelObjectMapping create(IndexSchemaElement parent, String relativeFieldName, ObjectStructure structure, Consumer<StandardIndexFieldTypeOptionsStep<?, ?>> additionalConfiguration) { IndexSchemaObjectField objectField = parent.objectField( relativeFieldName, structure ); return new FirstLevelObjectMapping( relativeFieldName, objectField, additionalConfiguration ); } private FirstLevelObjectMapping(String relativeFieldName, IndexSchemaObjectField objectField, Consumer<StandardIndexFieldTypeOptionsStep<?, 
?>> additionalConfiguration) { super( objectField, additionalConfiguration ); this.relativeFieldName = relativeFieldName; self = objectField.toReference(); } } private static class CompatibleIndexBinding { final SimpleFieldModelsByType fieldModels; final SimpleFieldModelsByType fieldWithDslConverterModels; CompatibleIndexBinding(IndexSchemaElement root) { fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "", (fieldType, c) -> { c.sortable( Sortable.YES ); addIrrelevantOptions( fieldType, c ); } ); fieldWithDslConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "converted_", (fieldType, c) -> { c.sortable( Sortable.YES ); c.dslConverter( ValueWrapper.class, ValueWrapper.toDocumentValueConverter() ); addIrrelevantOptions( fieldType, c ); } ); } // See HSEARCH-3307: this checks that irrelevant options are ignored when checking cross-index field compatibility protected void addIrrelevantOptions(FieldTypeDescriptor<?, ?> fieldType, StandardIndexFieldTypeOptionsStep<?, ?> c) { c.searchable( Searchable.NO ); c.projectable( Projectable.YES ); if ( fieldType.isFieldSortSupported() ) { c.aggregable( Aggregable.YES ); } } } private static class RawFieldCompatibleIndexBinding extends IndexBinding { RawFieldCompatibleIndexBinding(IndexSchemaElement root) { /* * Add fields with the same name as the fieldModels from IndexBinding, * but with an incompatible DSL converter. */ super( root, c -> c.dslConverter( ValueWrapper.class, ValueWrapper.toDocumentValueConverter() ) ); } } private static class MissingFieldIndexBinding { MissingFieldIndexBinding(IndexSchemaElement root) { } } private static class IncompatibleIndexBinding { IncompatibleIndexBinding(IndexSchemaElement root) { /* * Add fields with the same name as the fieldModels from IndexBinding, * but with an incompatible type. 
*/ mapFieldsWithIncompatibleType( root ); } private static void mapFieldsWithIncompatibleType(IndexSchemaElement parent) { supportedFieldTypes.forEach( typeDescriptor -> SimpleFieldModel.mapper( FieldTypeDescriptor.getIncompatible( typeDescriptor ) ) .map( parent, "" + typeDescriptor.getUniqueName() ) ); } } }
apache/sis
35,763
endorsed/src/org.apache.sis.referencing/main/org/apache/sis/referencing/operation/provider/NTv2.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.referencing.operation.provider; import java.util.Map; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.ArrayList; import java.util.Arrays; import java.util.Locale; import java.util.logging.Level; import java.io.IOException; import java.nio.ByteOrder; import java.nio.ByteBuffer; import java.net.URI; import java.nio.channels.ReadableByteChannel; import java.nio.charset.StandardCharsets; import jakarta.xml.bind.annotation.XmlTransient; import javax.measure.Unit; import javax.measure.quantity.Angle; import org.opengis.util.FactoryException; import org.opengis.parameter.ParameterDescriptor; import org.opengis.parameter.ParameterDescriptorGroup; import org.opengis.parameter.ParameterNotFoundException; import org.opengis.referencing.cs.EllipsoidalCS; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.Transformation; import org.opengis.referencing.operation.NoninvertibleTransformException; import org.apache.sis.referencing.operation.gridded.CompressedGrid; import org.apache.sis.referencing.operation.gridded.GridLoader; import org.apache.sis.referencing.operation.gridded.GridGroup; import 
org.apache.sis.referencing.operation.gridded.GridFile; import org.apache.sis.referencing.operation.gridded.LoadedGrid; import org.apache.sis.referencing.internal.Resources; import org.apache.sis.referencing.internal.shared.Formulas; import org.apache.sis.parameter.ParameterBuilder; import org.apache.sis.parameter.Parameters; import org.apache.sis.util.internal.shared.Strings; import org.apache.sis.util.resources.Errors; import org.apache.sis.util.resources.Messages; import org.apache.sis.measure.Units; import org.apache.sis.pending.jdk.JDK19; /** * The provider for <q>National Transformation version 2</q> (EPSG:9615). * This transform requires data that are not bundled by default with Apache SIS. * * @author Simon Reynard (Geomatys) * @author Martin Desruisseaux (Geomatys) */ @XmlTransient public final class NTv2 extends AbstractProvider { /** * Serial number for inter-operability with different versions. */ private static final long serialVersionUID = -4027618007780159180L; /** * The operation parameter descriptor for the <q>Latitude and longitude difference file</q> parameter value. * The file extension is typically {@code ".gsb"}. There is no default value. * * <!-- Generated by ParameterNameTableGenerator --> * <table class="sis"> * <caption>Parameter names</caption> * <tr><td> EPSG: </td><td> Latitude and longitude difference file </td></tr> * </table> * <b>Notes:</b> * <ul> * <li>No default value</li> * </ul> */ static final ParameterDescriptor<URI> FILE; /** * The group of all parameters expected by this coordinate operation. */ private static final ParameterDescriptorGroup PARAMETERS; static { final ParameterBuilder builder = builder(); FILE = builder .addIdentifier("8656") .addName("Latitude and longitude difference file") .create(URI.class, null); PARAMETERS = builder .addIdentifier("9615") .addName("NTv2") .addName("Geographic2D Offsets (NTv2)") .createGroup(FILE); } /** * Creates a new provider. 
     */
    public NTv2() {
        super(Transformation.class, PARAMETERS,
              EllipsoidalCS.class, false,
              EllipsoidalCS.class, false,
              (byte) 2);
    }

    /**
     * Creates a transform from the specified group of parameter values.
     *
     * @param  context  the parameter values together with its context.
     * @return the created math transform.
     * @throws ParameterNotFoundException if a required parameter was not found.
     * @throws FactoryException if an error occurred while loading the grid.
     */
    @Override
    public MathTransform createMathTransform(final Context context) throws FactoryException {
        return createMathTransform(NTv2.class, context, 2);
    }

    /**
     * Creates a transform from the specified group of parameter values.
     *
     * @param  provider  the provider which is creating a transform: {@link NTv2} or {@link NTv1}.
     * @param  context   the parameter values together with its context.
     * @param  version   the expected version (1 or 2).
     * @return the created math transform.
     * @throws ParameterNotFoundException if a required parameter was not found.
     * @throws FactoryException if an error occurred while loading the grid.
     */
    static MathTransform createMathTransform(final Class<? extends AbstractProvider> provider,
            final Context context, final int version) throws FactoryException
    {
        final GridFile file = new GridFile(Parameters.castOrWrap(context.getCompletedParameters()), FILE);
        final LoadedGrid<Angle,Angle> grid;
        try {
            grid = getOrLoad(provider, file, version);
        } catch (FactoryException e) {
            throw e;        // Already the exception type that we want to propagate, with context.
        } catch (Exception e) {
            throw file.canNotLoad(provider, provider.getSimpleName(), e);
        }
        return LoadedGrid.createGeodeticTransformation(provider, context.getFactory(), grid);
    }

    /**
     * Returns the grid of the given name.
     * This method returns the cached instance if it still exists, or loads the grid otherwise.
     *
     * @param  provider  the provider which is creating a transform.
     * @param  file      relative or absolute path of the datum shift grid file to load.
     * @param  version   the expected version (1 or 2).
     * @throws Exception if an error occurred while loading the grid.
     *         Caller should handle the exception with {@code canNotLoad(…)}.
     *
     * @see GridLoader#canNotLoad(String, URI, Exception)
     */
    static LoadedGrid<Angle,Angle> getOrLoad(final Class<? extends AbstractProvider> provider,
            final GridFile file, final int version) throws Exception
    {
        return LoadedGrid.getOrLoad(file, null, () -> {
            final LoadedGrid<?,?> grid;
            // try-with-resources: the channel is closed even if loading fails.
            try (ReadableByteChannel in = file.newByteChannel()) {
                file.startLoading(provider);
                final Loader loader = new Loader(in, file, version);
                grid = loader.readAllGrids();
                loader.report(provider);
            }
            return grid.useSharedData();
        }).castTo(Angle.class, Angle.class);
    }

    /**
     * Loaders of NTv2 data. Instances of this class exist only at loading time.
     * More information on that file format can be found with
     * <a href="https://github.com/Esri/ntv2-file-routines">ESRI NTv2 routines</a>.
     *
     * <p>A NTv2 file contains an arbitrary number of sub-files, where each sub-file is a grid.
     * There is at least one grid (the parent), and potentially many sub-grids of higher density.
     * At the beginning is an overview header block of information that is common to all sub-files.
     * Then there are other headers specific to each sub-file.</p>
     *
     * <p>While this loader is primarily targeted at loading NTv2 files, it can also opportunistically
     * read NTv1 files. The two file formats differ by some header records having different names (but
     * same meanings), the possibility to have sub-grids and the presence of accuracy information.</p>
     *
     * @author Simon Reynard (Geomatys)
     * @author Martin Desruisseaux (Geomatys)
     */
    private static final class Loader extends GridLoader {
        /**
         * Size of a record. This value applies to both the header records and the data records.
         * In the case of header records, this is the size of the key plus the size of the value.
         */
        private static final int RECORD_LENGTH = 16;

        /**
         * Maximum number of characters for a key in a header record.
         * Expected keys are listed in the {@link #TYPES} map.
         */
        private static final int KEY_LENGTH = 8;

        /**
         * Type of data allowed in header records. Each record header identified by a key contains a value
         * of a type hard-coded by the NTv2 specification; the type is not specified in the file itself.
         */
        private enum DataType {STRING, INTEGER, DOUBLE};

        /**
         * Some known keywords that may appear in NTv2 header records, associated with the expected type of values.
         * The type is not encoded in a NTv2 file; it has to be hard-coded in this table. The first 11 entries in
         * this map (ignoring entries marked by "NTv1") are typically found in overview header, and the remaining
         * entries in the sub-grid headers.
         */
        private static final Map<String,DataType> TYPES;
        static {
            final Map<String,DataType> types = new HashMap<>(38);
/* NTv1 */  types.put("HEADER",   DataType.INTEGER);    // Number of header records (replaced by NUM_OREC)
            types.put("NUM_OREC", DataType.INTEGER);    // Number of records in the header - usually 11
            types.put("NUM_SREC", DataType.INTEGER);    // Number of records in the header of sub-grids - usually 11
            types.put("NUM_FILE", DataType.INTEGER);    // Number of sub-grids
/* NTv1 */  types.put("TYPE",     DataType.STRING);     // Grid shift data type (replaced by GS_TYPE)
            types.put("GS_TYPE",  DataType.STRING);     // Units: "SECONDS", "MINUTES" or "DEGREES"
            types.put("VERSION",  DataType.STRING);     // Grid version
/* NTv1 */  types.put("FROM",     DataType.STRING);     // Source CRS (replaced by SYSTEM_F)
/* NTv1 */  types.put("TO",       DataType.STRING);     // Target CRS (replaced by SYSTEM_T)
            types.put("SYSTEM_F", DataType.STRING);     // Source CRS
            types.put("SYSTEM_T", DataType.STRING);     // Target CRS
            types.put("DATUM_F",  DataType.STRING);     // Source datum (sometimes replaces SYSTEM_F)
            types.put("DATUM_T",  DataType.STRING);     // Target datum (sometimes replaces SYSTEM_T)
            types.put("MAJOR_F",  DataType.DOUBLE);     // Semi-major axis of source ellipsoid (in metres)
            types.put("MINOR_F",  DataType.DOUBLE);     // Semi-minor axis of source ellipsoid (in metres)
            types.put("MAJOR_T",  DataType.DOUBLE);     // Semi-major axis of target ellipsoid (in metres)
            types.put("MINOR_T",  DataType.DOUBLE);     // Semi-minor axis of target ellipsoid (in metres)
            types.put("SUB_NAME", DataType.STRING);     // Sub-grid identifier
            types.put("PARENT",   DataType.STRING);     // Parent grid
            types.put("CREATED",  DataType.STRING);     // Creation time
            types.put("UPDATED",  DataType.STRING);     // Update time
            types.put("S_LAT",    DataType.DOUBLE);     // Southmost φ value
            types.put("N_LAT",    DataType.DOUBLE);     // Northmost φ value
            types.put("E_LONG",   DataType.DOUBLE);     // Eastmost λ value - west is positive, east is negative
            types.put("W_LONG",   DataType.DOUBLE);     // Westmost λ value - west is positive, east is negative
/* NTv1 */  types.put("N_GRID",   DataType.DOUBLE);     // Latitude grid interval (replaced by LAT_INC)
/* NTv1 */  types.put("W_GRID",   DataType.DOUBLE);     // Longitude grid interval (replaced by LONG_INC)
            types.put("LAT_INC",  DataType.DOUBLE);     // Increment on φ axis
            types.put("LONG_INC", DataType.DOUBLE);     // Increment on λ axis - positive toward west
            types.put("GS_COUNT", DataType.INTEGER);    // Number of sub-grid records following
            TYPES = types;
            /*
             * NTv1 has two last unnamed records of DataType.DOUBLE: "Semi_Major_Axis_From"
             * and "Semi_Major_Axis_To". Those records are currently ignored.
             */
        }

        /**
         * The headers content, as the union of the overview header and the header in process of being read.
         * Keys are strings like {@code "VERSION"}, {@code "SYSTEM_F"}, {@code "LONG_INC"}, <i>etc.</i>.
         * Values are {@link String}, {@link Integer} or {@link Double}. If some keys are unrecognized,
         * they will be put in this map with the {@code null} value and the {@link #hasUnrecognized} flag
         * will be set to {@code true}.
         */
        private final Map<String,Object> header;

        /**
         * Keys of {@link #header} for entries that were declared in the overview header.
         * This is used after {@link #readGrid(Map, Map)} execution for discarding all
         * entries specific to sub-grids, to avoid mixing entries from two sub-grids.
         */
        private final String[] overviewKeys;

        /**
         * {@code true} if we are reading a NTv2 file, or {@code false} if we are reading a NTv1 file.
         */
        private final boolean isV2;

        /**
         * {@code true} if the {@code header} map contains at least one key associated to a null value.
         */
        private boolean hasUnrecognized;

        /**
         * Number of grids expected in the file.
         */
        private final int numGrids;

        /**
         * Dates at which the grid has been created or updated, or {@code null} if unknown.
         * Used for information purpose only.
         */
        private String created, updated;

        /**
         * Creates a new reader for the given channel.
         * This constructor parses the header immediately, but does not read any grid.
         * A hint about expected NTv2 version is given, but this constructor may override
         * that hint with information found in the file.
         *
         * @param  channel  where to read data from.
         * @param  file     path to the longitude and latitude difference file.
         *                  Used for parameter declaration and error reporting.
         * @param  version  the expected version (1 or 2).
         * @throws IOException if an I/O error occurred while reading the header.
         * @throws FactoryException if a data record cannot be parsed.
         */
        Loader(final ReadableByteChannel channel, final GridFile file, int version)
                throws IOException, FactoryException
        {
            super(channel, ByteBuffer.allocate(4096), file);
            header = new LinkedHashMap<>();
            ensureBufferContains(RECORD_LENGTH);
            // Byte order is not declared in the file: detect it from the first record value.
            if (isLittleEndian(buffer.getInt(KEY_LENGTH))) {
                buffer.order(ByteOrder.LITTLE_ENDIAN);
            }
            /*
             * Read the overview header. It is normally made of the first 11 records documented in TYPES map:
             * NUM_OREC, NUM_SREC, NUM_FILE, GS_TYPE, VERSION, SYSTEM_F, SYSTEM_T, MAJOR_F, MINOR_F, MAJOR_T,
             * MINOR_T.
             */
            readHeader(version >= 2 ? 11 : 12, "NUM_OREC");
            /*
             * The version number is a string like "NTv2.0". If there is no version number, it is probably NTv1
             * since the "VERSION" record was introduced only in version 2.
             * In such case the `version` parameter
             * should have been 1; in case of doubt we do not modify the provided value.
             */
            final String vs = (String) get("VERSION", false);
            if (vs != null) {
                for (int i=0; i<vs.length(); i++) {
                    final char c = vs.charAt(i);
                    if (c >= '0' && c <= '9') {
                        version = c - '0';      // First digit found gives the major version number.
                        break;
                    }
                }
            }
            /*
             * Subgrids are NTv2 features which did not exist in NTv1. If we expect a NTv2 file,
             * the record is mandatory. If we expect a NTv1 file, the record should not be present
             * but we nevertheless check in case we have been misled by a missing "VERSION" record.
             */
            final Integer n = (Integer) get("NUM_FILE", (vs != null) && version >= 2);
            isV2 = (n != null);
            if (isV2) {
                numGrids = n;
                if (numGrids < 1) {
                    throw new FactoryException(Errors.format(Errors.Keys.UnexpectedValueInElement_2, "NUM_FILE", n));
                }
            } else {
                numGrids = 1;       // NTv1: sub-grids do not exist, so a single grid is expected.
            }
            overviewKeys = header.keySet().toArray(String[]::new);
        }

        /**
         * Returns {@code true} if the given value seems to be stored in little endian order.
         * The strategy is to read an integer that we expect to be small (the HEADER or NUM_OREC
         * value which should be 12 or 11) and to check which order gives the smallest value.
         */
        private static boolean isLittleEndian(final int n) {
            return Integer.compareUnsigned(n, Integer.reverseBytes(n)) > 0;
        }

        /**
         * Reads a string at the current buffer position, assuming ASCII encoding.
         * After this method call, the buffer position will be the first byte after
         * the string. The buffer content is unmodified.
         *
         * @param  length  number of bytes to read.
         */
        private String readString(int length) {
            final byte[] array = buffer.array();
            final int position = buffer.position();
            buffer.position(position + length);     // Update before we modify `length`.
            // NOTE(review): the `length > position` bound looks suspicious — one would expect `length > 0`
            // for stripping trailing bytes ≤ ' '. With the current bound the loop is skipped whenever
            // position ≥ length; `trim()` still removes trailing whitespace after decoding, but trailing
            // non-ASCII bytes would survive (decoded as U+FFFD). TODO: confirm against upstream history.
            while (length > position && array[position + length - 1] <= ' ') length--;
            return new String(array, position, length, StandardCharsets.US_ASCII).trim();
        }

        /**
         * Reads all records found in the header, starting from the current buffer position.
         * The header may be the overview header (in which case we expect a number of records
         * given by {@code HEADER} or {@code NUM_OREC} value) or a sub-grid header (in which
         * case we expect {@code NUM_SREC} records).
         *
         * <p>The {@code numRecords} given in argument is a default value.
         * It will be updated as soon as the {@code numKey} record is found.</p>
         *
         * @param  numRecords  default number of expected records (usually 11).
         * @param  numkey      key of the record giving the number of records: {@code "NUM_OREC"} or {@code "NUM_SREC"}.
         * @throws IOException if an I/O error occurred while reading the records.
         * @throws FactoryException if a key is found twice with two different values.
         */
        private void readHeader(int numRecords, final String numkey) throws IOException, FactoryException {
            for (int i=0; i < numRecords; i++) {
                ensureBufferContains(RECORD_LENGTH);
                final String key = readString(KEY_LENGTH).toUpperCase(Locale.US).replace(' ', '_');
                final DataType type = TYPES.get(key);
                final Comparable<?> value;
                if (type == null) {
                    value = null;                   // Unknown keys are kept (null value) for the warning emitted later.
                    hasUnrecognized = true;
                } else switch (type) {              // TODO: check if we can simplify in JDK14.
                    default: throw new AssertionError(type);
                    case STRING: value = readString(RECORD_LENGTH - KEY_LENGTH); break;
                    case DOUBLE: value = buffer.getDouble(); break;
                    case INTEGER: {
                        final int n = buffer.getInt();
                        // Skip the remaining 4 bytes of the 8-byte value field.
                        buffer.position(buffer.position() + Integer.BYTES);
                        if (key.equals(numkey) || key.equals("HEADER")) {
                            /*
                             * HEADER (NTv1), NUM_OREC (NTv2) or NUM_SREC specify the number of records expected
                             * in the header, which may be the header that we are reading right now. If the value
                             * applies to the header that we are reading, we need to update `numRecords` on the fly.
                             */
                            numRecords = n;
                        }
                        value = n;
                        break;
                    }
                }
                final Object old = header.put(key, value);
                if (old != null && !old.equals(value)) {
                    throw new FactoryException(Errors.format(Errors.Keys.KeyCollision_1, key));
                }
            }
            if (created == null) created = Strings.trimOrNull((String) get("CREATED", false));
            if (updated == null) updated = Strings.trimOrNull((String) get("UPDATED", false));
        }

        /**
         * Reads all grids and returns the root grid.
         * After reading all grids, this method rearranges
         * them in a child-parent relationship. The result is a tree with a single root containing
         * sub-grids (if any) as children.
         */
        final LoadedGrid<Angle,Angle> readAllGrids() throws IOException, FactoryException, NoninvertibleTransformException {
            final Map<String, LoadedGrid<Angle,Angle>> grids = JDK19.newHashMap(numGrids);
            final Map<String, List<LoadedGrid<Angle,Angle>>> children = new LinkedHashMap<>();      // Should have few entries.
            while (grids.size() < numGrids) {
                readGrid(grids, children);
            }
            /*
             * Assign the sub-grids to their parent only after we have finished reading all grids.
             * Doing this work last is more robust to cases where grids are in random order.
             *
             * Notes: if the parent-child graph contains cycles (deeper than a child declaring itself as its parent),
             * the grids in cycles will be lost. This is because we need a grid without parent for getting the
             * graph added in the roots list. There is currently no mechanism for detecting those problems.
             */
            final List<LoadedGrid<Angle,Angle>> roots = new ArrayList<>();
            for (final Map.Entry<String, List<LoadedGrid<Angle,Angle>>> entry : children.entrySet()) {
                final LoadedGrid<Angle,Angle> parent = grids.get(entry.getKey());
                final List<LoadedGrid<Angle,Angle>> subgrids = entry.getValue();
                if (parent != null) {
                    /*
                     * Verify that the children do not declare themselves as their parent.
                     * It may happen if SUB_GRID and PARENT have the same value, typically a
                     * null or empty value if those records were actually unspecified.
                     */
                    for (int i=subgrids.size(); --i >= 0;) {
                        if (subgrids.get(i) == parent) {    // Want identity check, no need for equals(Object).
                            subgrids.remove(i);
                            roots.add(parent);
                            break;
                        }
                    }
                    if (!subgrids.isEmpty()) {
                        parent.setSubGrids(subgrids);
                    }
                } else {
                    roots.addAll(subgrids);     // Grids with an unknown parent name become roots themselves.
                }
            }
            switch (roots.size()) {
                case 0:  throw new FactoryException(Errors.format(Errors.Keys.CanNotRead_1, file));
                case 1:  return roots.get(0);
                default: return GridGroup.create(file, roots);
            }
        }

        /**
         * Reads the next grid, starting at the current position. A NTv2 file can have many grids.
         * This can be used for grids having different resolutions depending on the geographic area.
         * The first grid can cover a large area with a coarse resolution, and next grids cover smaller
         * areas overlapping the first grid but with finer resolution.
         *
         * <p>NTv2 grids contain also information about shifts accuracy. This is not yet handled by SIS,
         * except for determining an approximate grid cell resolution.</p>
         *
         * @param  addTo     the map where to add the grid with the grid name as the key.
         * @param  children  the map where to add children with the parent name as the key.
         */
        private void readGrid(final Map<String, LoadedGrid<Angle,Angle>> addTo,
                final Map<String, List<LoadedGrid<Angle,Angle>>> children)
                throws IOException, FactoryException, NoninvertibleTransformException
        {
            if (isV2) {
                readHeader((Integer) get("NUM_SREC", null, null), "NUM_SREC");
            }
            /*
             * Extract the geographic bounding box and cell size. While different units are allowed,
             * in practice we usually have seconds of angle. This unit has the advantage of allowing
             * all floating-point values to be integers.
             *
             * Note that the longitude values in NTv2 files are positive WEST.
             */
            final Unit<Angle> unit;
            final double precision;
            final String type = (String) get("GS_TYPE", "TYPE", null);
            if (type.equalsIgnoreCase("SECONDS")) {     // Most common value
                unit = Units.ARC_SECOND;
                precision = SECOND_PRECISION;           // Used only as a hint; will not hurt if wrong.
            } else if (type.equalsIgnoreCase("MINUTES")) {
                unit = Units.ARC_MINUTE;
                precision = SECOND_PRECISION / 60;                      // Used only as a hint; will not hurt if wrong.
            } else if (type.equalsIgnoreCase("DEGREES")) {
                unit = Units.DEGREE;
                precision = SECOND_PRECISION / DEGREES_TO_SECONDS;      // Used only as a hint; will not hurt if wrong.
            } else {
                throw new FactoryException(Errors.format(Errors.Keys.UnexpectedValueInElement_2, "GS_TYPE", type));
            }
            final double ymin = (Double) get("S_LAT",    null, null);
            final double ymax = (Double) get("N_LAT",    null, null);
            final double xmin = (Double) get("E_LONG",   null, null);       // Sign reversed compared to usual convention.
            final double xmax = (Double) get("W_LONG",   null, null);       // Idem.
            final double dy   = (Double) get("LAT_INC",  "N_GRID", null);
            final double dx   = (Double) get("LONG_INC", "W_GRID", null);   // Positive toward west.
            final Integer declared = (Integer) get("GS_COUNT", false);
            final int width  = Math.toIntExact(Math.round((xmax - xmin) / dx + 1));
            final int height = Math.toIntExact(Math.round((ymax - ymin) / dy + 1));
            final int count  = Math.multiplyExact(width, height);
            if (declared != null && count != declared) {
                throw new FactoryException(Errors.format(Errors.Keys.UnexpectedValueInElement_2, "GS_COUNT", declared));
            }
            /*
             * Construct the grid. The sign of longitude translations will need to be reversed in order to have
             * longitudes increasing toward East. We set isCellValueRatio = true (by the arguments given to the
             * LoadedGrid constructor) because this is required by InterpolatedTransform implementation.
             * This setting implies that we divide translation values by dx or dy at reading time. Note that this
             * free us from reversing the sign of longitude translations in the code below; instead, this reversal
             * will be handled by grid.coordinateToGrid MathTransform and its inverse.
             */
            final double size = Math.max(dx, dy);
            final LoadedGrid<Angle,Angle> grid;
            if (isV2) {
                final LoadedGrid.Float<Angle,Angle> data;
                data = new LoadedGrid.Float<>(2, unit, unit, true, -xmin, ymin, -dx, dy, width, height, PARAMETERS, file);
                @SuppressWarnings("MismatchedReadAndWriteOfArray") final float[] tx = data.offsets[0];
                @SuppressWarnings("MismatchedReadAndWriteOfArray") final float[] ty = data.offsets[1];
                data.accuracy = Double.NaN;
                for (int i=0; i<count; i++) {
                    ensureBufferContains(4 * Float.BYTES);      // Each NTv2 record is 4 floats: Δφ, Δλ and their accuracies.
                    ty[i] = (float) (buffer.getFloat() / dy);   // Division by dx and dy because isCellValueRatio = true.
                    tx[i] = (float) (buffer.getFloat() / dx);
                    final double accuracy = Math.min(buffer.getFloat() / dy, buffer.getFloat() / dx);
                    if (accuracy > 0 && !(accuracy >= data.accuracy)) {     // Use `!` for replacing the initial NaN.
                        data.accuracy = accuracy;                           // Smallest non-zero accuracy.
                    }
                }
                grid = CompressedGrid.compress(data, null, precision / size);
            } else {
                /*
                 * NTv1: same as NTv2 but using double precision and without accuracy information.
                 */
                final LoadedGrid.Double<Angle,Angle> data;
                grid = data = new LoadedGrid.Double<>(2, unit, unit, true, -xmin, ymin, -dx, dy, width, height, PARAMETERS, file);
                @SuppressWarnings("MismatchedReadAndWriteOfArray") final double[] tx = data.offsets[0];
                @SuppressWarnings("MismatchedReadAndWriteOfArray") final double[] ty = data.offsets[1];
                for (int i=0; i<count; i++) {
                    ensureBufferContains(2 * Double.BYTES);
                    ty[i] = buffer.getDouble() / dy;
                    tx[i] = buffer.getDouble() / dx;
                }
            }
            /*
             * We need an estimation of translation accuracy, in order to decide when to stop iterations
             * during inverse transformations. If we did not find that information in the file, compute
             * an arbitrary default accuracy.
             */
            if (!(grid.accuracy > 0)) {     // Use ! for catching NaN values (paranoiac check).
                grid.accuracy = Units.DEGREE.getConverterTo(unit).convert(Formulas.ANGULAR_TOLERANCE) / size;
            }
            /*
             * Add the grid to two collections.
             * The first collection associates this grid to its name, and the
             * second collection associates the grid to its parent. We do not try to resolve the child-parent
             * relationship here; we will do that after all sub-grids have been read.
             */
            final String name = (String) get("SUB_NAME", numGrids > 1);     // Name is mandatory only if sub-grids exist.
            if (addTo.put(name, grid) != null) {
                throw new FactoryException(Errors.format(Errors.Keys.DuplicatedIdentifier_1, name));
            }
            children.computeIfAbsent((String) get("PARENT", numGrids > 1), (k) -> new ArrayList<>()).add(grid);
            /*
             * End of grid parsing. Remove all header entries that are specific to this sub-grid.
             * After this operation, `header` will contain only overview records.
             */
            header.keySet().retainAll(Arrays.asList(overviewKeys));
        }

        /**
         * Gets the value for the given key. If the value is absent, then this method throws an exception
         * if {@code mandatory} is {@code true} or returns {@code null} otherwise.
         *
         * @param  key        key of the value to search.
         * @param  mandatory  whether to throw an exception if the value is not found.
         * @return value associated to the given key, or {@code null} if none and not mandatory.
         * @throws FactoryException if the value is mandatory but was not found.
         */
        private Object get(final String key, final boolean mandatory) throws FactoryException {
            final Object value = header.get(key);
            if (value != null || !mandatory) {
                return value;
            }
            throw new FactoryException(Errors.format(Errors.Keys.PropertyNotFound_2, file, key));
        }

        /**
         * Returns the value for the given key, or throws an exception if the value is not found.
         * Before failing when the key is not found, this method searches for a value associated to
         * an alternative name. That alternative should be the name used in legacy NTv1.
         *
         * @param  key  key of the value to search.
         * @param  alt  alternative key name, or {@code null} if none.
         * @param  kv1  name used in NTv1, or {@code null} if none.
         * @return value associated to the given key (never {@code null}).
*/ private Object get(final String key, final String alt, final String kv1) throws FactoryException { Object value = header.get(key); if (value == null) { value = header.get(alt); if (value == null) { value = header.get(kv1); if (value == null) { throw new FactoryException(Errors.format(Errors.Keys.PropertyNotFound_2, file, key)); } } } return value; } /** * If we had any warnings during the loading process, report them now. * * @param caller the provider which created this loader. */ void report(final Class<? extends AbstractProvider> caller) { try { final String source = (String) get("SYSTEM_F", "DATUM_F", "FROM"); final String target = (String) get("SYSTEM_T", "DATUM_T", "TO"); log(caller, Resources.forLocale(null).createLogRecord(Level.FINE, Resources.Keys.UsingDatumShiftGrid_4, source, target, (created != null) ? created : "?", (updated != null) ? updated : "?")); } catch (FactoryException e) { recoverableException(caller, e); // Ignore since above code is only for information purpose. } if (hasUnrecognized) { final StringBuilder keywords = new StringBuilder(); for (final Map.Entry<String,Object> entry : header.entrySet()) { if (entry.getValue() == null) { if (keywords.length() != 0) { keywords.append(", "); } keywords.append(entry.getKey()); } } log(caller, Messages.forLocale(null).createLogRecord(Level.WARNING, Messages.Keys.UnknownKeywordInRecord_2, file, keywords.toString())); } } } }
googleapis/google-cloud-java
35,560
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/CreateChannelPartnerRepricingConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Request message for * [CloudChannelService.CreateChannelPartnerRepricingConfig][google.cloud.channel.v1.CloudChannelService.CreateChannelPartnerRepricingConfig]. * </pre> * * Protobuf type {@code google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest} */ public final class CreateChannelPartnerRepricingConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) CreateChannelPartnerRepricingConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateChannelPartnerRepricingConfigRequest.newBuilder() to construct. 
private CreateChannelPartnerRepricingConfigRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateChannelPartnerRepricingConfigRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateChannelPartnerRepricingConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_CreateChannelPartnerRepricingConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_CreateChannelPartnerRepricingConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest.class, com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CHANNEL_PARTNER_REPRICING_CONFIG_FIELD_NUMBER = 2; private com.google.cloud.channel.v1.ChannelPartnerRepricingConfig channelPartnerRepricingConfig_; /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the channelPartnerRepricingConfig field is set. */ @java.lang.Override public boolean hasChannelPartnerRepricingConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The channelPartnerRepricingConfig. */ @java.lang.Override public com.google.cloud.channel.v1.ChannelPartnerRepricingConfig getChannelPartnerRepricingConfig() { return channelPartnerRepricingConfig_ == null ? com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.getDefaultInstance() : channelPartnerRepricingConfig_; } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. 
* </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.channel.v1.ChannelPartnerRepricingConfigOrBuilder getChannelPartnerRepricingConfigOrBuilder() { return channelPartnerRepricingConfig_ == null ? com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.getDefaultInstance() : channelPartnerRepricingConfig_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getChannelPartnerRepricingConfig()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, getChannelPartnerRepricingConfig()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest)) { return super.equals(obj); } com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest other = 
(com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasChannelPartnerRepricingConfig() != other.hasChannelPartnerRepricingConfig()) return false; if (hasChannelPartnerRepricingConfig()) { if (!getChannelPartnerRepricingConfig().equals(other.getChannelPartnerRepricingConfig())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasChannelPartnerRepricingConfig()) { hash = (37 * hash) + CHANNEL_PARTNER_REPRICING_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getChannelPartnerRepricingConfig().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [CloudChannelService.CreateChannelPartnerRepricingConfig][google.cloud.channel.v1.CloudChannelService.CreateChannelPartnerRepricingConfig]. 
* </pre> * * Protobuf type {@code google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_CreateChannelPartnerRepricingConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_CreateChannelPartnerRepricingConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest.class, com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest.Builder.class); } // Construct using // com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getChannelPartnerRepricingConfigFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; channelPartnerRepricingConfig_ = null; if (channelPartnerRepricingConfigBuilder_ != null) { channelPartnerRepricingConfigBuilder_.dispose(); channelPartnerRepricingConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_CreateChannelPartnerRepricingConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest getDefaultInstanceForType() { return com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest build() { com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest buildPartial() { com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest result = new com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.channelPartnerRepricingConfig_ = channelPartnerRepricingConfigBuilder_ == null ? 
channelPartnerRepricingConfig_ : channelPartnerRepricingConfigBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) { return mergeFrom( (com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest other) { if (other == com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasChannelPartnerRepricingConfig()) { mergeChannelPartnerRepricingConfig(other.getChannelPartnerRepricingConfig()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public 
Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getChannelPartnerRepricingConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the ChannelPartner that will receive the * repricing config. Parent uses the format: * accounts/{account_id}/channelPartnerLinks/{channel_partner_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.channel.v1.ChannelPartnerRepricingConfig channelPartnerRepricingConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.ChannelPartnerRepricingConfig, com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.Builder, com.google.cloud.channel.v1.ChannelPartnerRepricingConfigOrBuilder> channelPartnerRepricingConfigBuilder_; /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the channelPartnerRepricingConfig field is set. */ public boolean hasChannelPartnerRepricingConfig() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The channelPartnerRepricingConfig. */ public com.google.cloud.channel.v1.ChannelPartnerRepricingConfig getChannelPartnerRepricingConfig() { if (channelPartnerRepricingConfigBuilder_ == null) { return channelPartnerRepricingConfig_ == null ? com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.getDefaultInstance() : channelPartnerRepricingConfig_; } else { return channelPartnerRepricingConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. 
* </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setChannelPartnerRepricingConfig( com.google.cloud.channel.v1.ChannelPartnerRepricingConfig value) { if (channelPartnerRepricingConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } channelPartnerRepricingConfig_ = value; } else { channelPartnerRepricingConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setChannelPartnerRepricingConfig( com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.Builder builderForValue) { if (channelPartnerRepricingConfigBuilder_ == null) { channelPartnerRepricingConfig_ = builderForValue.build(); } else { channelPartnerRepricingConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. 
* </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeChannelPartnerRepricingConfig( com.google.cloud.channel.v1.ChannelPartnerRepricingConfig value) { if (channelPartnerRepricingConfigBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && channelPartnerRepricingConfig_ != null && channelPartnerRepricingConfig_ != com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.getDefaultInstance()) { getChannelPartnerRepricingConfigBuilder().mergeFrom(value); } else { channelPartnerRepricingConfig_ = value; } } else { channelPartnerRepricingConfigBuilder_.mergeFrom(value); } if (channelPartnerRepricingConfig_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearChannelPartnerRepricingConfig() { bitField0_ = (bitField0_ & ~0x00000002); channelPartnerRepricingConfig_ = null; if (channelPartnerRepricingConfigBuilder_ != null) { channelPartnerRepricingConfigBuilder_.dispose(); channelPartnerRepricingConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.Builder getChannelPartnerRepricingConfigBuilder() { bitField0_ |= 0x00000002; onChanged(); return getChannelPartnerRepricingConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. 
* </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.channel.v1.ChannelPartnerRepricingConfigOrBuilder getChannelPartnerRepricingConfigOrBuilder() { if (channelPartnerRepricingConfigBuilder_ != null) { return channelPartnerRepricingConfigBuilder_.getMessageOrBuilder(); } else { return channelPartnerRepricingConfig_ == null ? com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.getDefaultInstance() : channelPartnerRepricingConfig_; } } /** * * * <pre> * Required. The ChannelPartnerRepricingConfig object to update. * </pre> * * <code> * .google.cloud.channel.v1.ChannelPartnerRepricingConfig channel_partner_repricing_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.ChannelPartnerRepricingConfig, com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.Builder, com.google.cloud.channel.v1.ChannelPartnerRepricingConfigOrBuilder> getChannelPartnerRepricingConfigFieldBuilder() { if (channelPartnerRepricingConfigBuilder_ == null) { channelPartnerRepricingConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.ChannelPartnerRepricingConfig, com.google.cloud.channel.v1.ChannelPartnerRepricingConfig.Builder, com.google.cloud.channel.v1.ChannelPartnerRepricingConfigOrBuilder>( getChannelPartnerRepricingConfig(), getParentForChildren(), isClean()); channelPartnerRepricingConfig_ = null; } return channelPartnerRepricingConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest) private static final com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest(); } public static com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateChannelPartnerRepricingConfigRequest> PARSER = new com.google.protobuf.AbstractParser<CreateChannelPartnerRepricingConfigRequest>() { @java.lang.Override public CreateChannelPartnerRepricingConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateChannelPartnerRepricingConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateChannelPartnerRepricingConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.CreateChannelPartnerRepricingConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-api-java-client-services
35,645
clients/google-api-services-apigee/v1/2.0.0/com/google/api/services/apigee/v1/model/GoogleCloudApigeeV1Organization.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.apigee.v1.model;

/**
 * Model definition for GoogleCloudApigeeV1Organization.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Apigee API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
// NOTE(review): generated JSON model. Field names double as the JSON wire keys via @Key, so
// they must not be renamed. Getters/setters are plain accessors; setters return `this` for
// chaining. Unknown JSON keys are retained by the GenericJson superclass (see set()/clone()).
@SuppressWarnings("javadoc")
public final class GoogleCloudApigeeV1Organization extends com.google.api.client.json.GenericJson {

  /**
   * Optional. Addon configurations of the Apigee organization.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudApigeeV1AddonsConfig addonsConfig;

  /**
   * Required. DEPRECATED: This field will eventually be deprecated and replaced with a differently-
   * named field. Primary Google Cloud region for analytics data storage. For valid values, see
   * [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-
   * started/create-org).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String analyticsRegion;

  /**
   * Optional. Cloud KMS key name used for encrypting API consumer data. If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String apiConsumerDataEncryptionKeyName;

  /**
   * Optional. This field is needed only for customers using non-default data residency regions.
   * Apigee stores some control plane data only in single region. This field determines which single
   * region Apigee should use. For example: "us-west1" when control plane is in US or "europe-west2"
   * when control plane is in EU.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String apiConsumerDataLocation;

  /**
   * Output only. Apigee Project ID associated with the organization. Use this project to allowlist
   * Apigee in the Service Attachment when using private service connect with Apigee.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String apigeeProjectId;

  /**
   * Not used by Apigee.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> attributes;

  /**
   * Optional. Compute Engine network used for Service Networking to be peered with Apigee runtime
   * instances. See [Getting started with the Service Networking
   * API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started).
   * Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the
   * creation of a runtime instance and can be updated only when there are no runtime instances. For
   * example: `default`. When changing authorizedNetwork, you must reconfigure VPC peering. After
   * VPC peering with previous network is deleted, [run the following
   * command](https://cloud.google.com/sdk/gcloud/reference/services/vpc-peerings/delete): `gcloud
   * services vpc-peerings delete --network=NETWORK`, where `NETWORK` is the name of the previous
   * network. This will delete the previous Service Networking. Otherwise, you will get the
   * following error: `The resource 'projects/...-tp' is already linked to another shared VPC host
   * 'projects/...-tp`. Apigee also supports shared VPC (that is, the host network project is not
   * the same as the one that is peering with Apigee). See [Shared VPC
   * overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the
   * following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example:
   * `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee
   * hybrid.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String authorizedNetwork;

  /**
   * Optional. Billing type of the Apigee organization. See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String billingType;

  /**
   * Output only. Base64-encoded public certificate for the root CA of the Apigee organization.
   * Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
   * The value may be {@code null}.
   */
  // Stored in its Base64 string form; use decodeCaCertificate()/encodeCaCertificate() for bytes.
  @com.google.api.client.util.Key
  private java.lang.String caCertificate;

  /**
   * Optional. Cloud KMS key name used for encrypting control plane data that is stored in a multi
   * region. Only used for the data residency region "US" or "EU". If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String controlPlaneEncryptionKeyName;

  /**
   * Output only. Time that the Apigee organization was created in milliseconds since epoch.
   * The value may be {@code null}.
   */
  // @JsonString: the 64-bit value travels as a JSON string (keeps precision for consumers
  // whose number type cannot hold a full long, e.g. JavaScript).
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long createdAt;

  /**
   * Not used by Apigee.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String customerName;

  /**
   * Optional. Description of the Apigee organization.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Optional. Flag that specifies whether the VPC Peering through Private Google Access should be
   * disabled between the consumer network and Apigee. Valid only when RuntimeType is set to CLOUD.
   * Required if an authorizedNetwork on the consumer project is not provided, in which case the
   * flag should be set to true. The value must be set before the creation of any Apigee runtime
   * instance and can be updated only when there are no runtime instances. **Note:** Apigee will be
   * deprecating the vpc peering model that requires you to provide 'authorizedNetwork', by making
   * the non-peering model as the default way of provisioning Apigee organization in future. So,
   * this will be a temporary flag to enable the transition. Not supported for Apigee hybrid.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean disableVpcPeering;

  /**
   * Optional. Display name for the Apigee organization. Unused, but reserved for future use.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String displayName;

  /**
   * Output only. List of environments in the Apigee organization.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> environments;

  /**
   * Output only. Time that the Apigee organization is scheduled for deletion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long expiresAt;

  /**
   * Output only. Time that the Apigee organization was last modified in milliseconds since epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long lastModifiedAt;

  /**
   * Output only. Name of the Apigee organization.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Optional. Flag that specifies if internet egress is restricted for VPC Service Controls. Valid
   * only when runtime_type is `CLOUD` and disable_vpc_peering is `true`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean networkEgressRestricted;

  /**
   * Optional. Configuration for the Portals settings.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean portalDisabled;

  /**
   * Output only. Project ID associated with the Apigee organization.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String projectId;

  /**
   * Optional. Properties defined in the Apigee organization profile.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudApigeeV1Properties properties;

  /**
   * Optional. Cloud KMS key name used for encrypting the data that is stored and replicated across
   * runtime instances. Update is not allowed after the organization is created. If not specified or
   * [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For
   * example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for
   * Apigee hybrid.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String runtimeDatabaseEncryptionKeyName;

  /**
   * Required. Runtime type of the Apigee organization based on the Apigee subscription purchased.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String runtimeType;

  /**
   * Output only. State of the organization. Values other than ACTIVE means the resource is not
   * ready to use.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * Output only. Subscription plan that the customer has purchased. Output only.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String subscriptionPlan;

  /**
   * Output only. DEPRECATED: This will eventually be replaced by BillingType. Subscription type of
   * the Apigee organization. Valid values include trial (free, limited, and for evaluation purposes
   * only) or paid (full subscription has been purchased). See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing/).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String subscriptionType;

  /**
   * Not used by Apigee.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;

  /**
   * Optional. Addon configurations of the Apigee organization.
   * @return value or {@code null} for none
   */
  public GoogleCloudApigeeV1AddonsConfig getAddonsConfig() {
    return addonsConfig;
  }

  /**
   * Optional. Addon configurations of the Apigee organization.
   * @param addonsConfig addonsConfig or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setAddonsConfig(GoogleCloudApigeeV1AddonsConfig addonsConfig) {
    this.addonsConfig = addonsConfig;
    return this;
  }

  /**
   * Required. DEPRECATED: This field will eventually be deprecated and replaced with a differently-
   * named field. Primary Google Cloud region for analytics data storage. For valid values, see
   * [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-
   * started/create-org).
   * @return value or {@code null} for none
   */
  public java.lang.String getAnalyticsRegion() {
    return analyticsRegion;
  }

  /**
   * Required. DEPRECATED: This field will eventually be deprecated and replaced with a differently-
   * named field. Primary Google Cloud region for analytics data storage. For valid values, see
   * [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-
   * started/create-org).
   * @param analyticsRegion analyticsRegion or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setAnalyticsRegion(java.lang.String analyticsRegion) {
    this.analyticsRegion = analyticsRegion;
    return this;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting API consumer data. If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * @return value or {@code null} for none
   */
  public java.lang.String getApiConsumerDataEncryptionKeyName() {
    return apiConsumerDataEncryptionKeyName;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting API consumer data. If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * @param apiConsumerDataEncryptionKeyName apiConsumerDataEncryptionKeyName or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setApiConsumerDataEncryptionKeyName(java.lang.String apiConsumerDataEncryptionKeyName) {
    this.apiConsumerDataEncryptionKeyName = apiConsumerDataEncryptionKeyName;
    return this;
  }

  /**
   * Optional. This field is needed only for customers using non-default data residency regions.
   * Apigee stores some control plane data only in single region. This field determines which single
   * region Apigee should use. For example: "us-west1" when control plane is in US or "europe-west2"
   * when control plane is in EU.
   * @return value or {@code null} for none
   */
  public java.lang.String getApiConsumerDataLocation() {
    return apiConsumerDataLocation;
  }

  /**
   * Optional. This field is needed only for customers using non-default data residency regions.
   * Apigee stores some control plane data only in single region. This field determines which single
   * region Apigee should use. For example: "us-west1" when control plane is in US or "europe-west2"
   * when control plane is in EU.
   * @param apiConsumerDataLocation apiConsumerDataLocation or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setApiConsumerDataLocation(java.lang.String apiConsumerDataLocation) {
    this.apiConsumerDataLocation = apiConsumerDataLocation;
    return this;
  }

  /**
   * Output only. Apigee Project ID associated with the organization. Use this project to allowlist
   * Apigee in the Service Attachment when using private service connect with Apigee.
   * @return value or {@code null} for none
   */
  public java.lang.String getApigeeProjectId() {
    return apigeeProjectId;
  }

  /**
   * Output only. Apigee Project ID associated with the organization. Use this project to allowlist
   * Apigee in the Service Attachment when using private service connect with Apigee.
   * @param apigeeProjectId apigeeProjectId or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setApigeeProjectId(java.lang.String apigeeProjectId) {
    this.apigeeProjectId = apigeeProjectId;
    return this;
  }

  /**
   * Not used by Apigee.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getAttributes() {
    return attributes;
  }

  /**
   * Not used by Apigee.
   * @param attributes attributes or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setAttributes(java.util.List<java.lang.String> attributes) {
    this.attributes = attributes;
    return this;
  }

  /**
   * Optional. Compute Engine network used for Service Networking to be peered with Apigee runtime
   * instances. See [Getting started with the Service Networking
   * API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started).
   * Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the
   * creation of a runtime instance and can be updated only when there are no runtime instances. For
   * example: `default`. When changing authorizedNetwork, you must reconfigure VPC peering. After
   * VPC peering with previous network is deleted, [run the following
   * command](https://cloud.google.com/sdk/gcloud/reference/services/vpc-peerings/delete): `gcloud
   * services vpc-peerings delete --network=NETWORK`, where `NETWORK` is the name of the previous
   * network. This will delete the previous Service Networking. Otherwise, you will get the
   * following error: `The resource 'projects/...-tp' is already linked to another shared VPC host
   * 'projects/...-tp`. Apigee also supports shared VPC (that is, the host network project is not
   * the same as the one that is peering with Apigee). See [Shared VPC
   * overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the
   * following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example:
   * `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee
   * hybrid.
   * @return value or {@code null} for none
   */
  public java.lang.String getAuthorizedNetwork() {
    return authorizedNetwork;
  }

  /**
   * Optional. Compute Engine network used for Service Networking to be peered with Apigee runtime
   * instances. See [Getting started with the Service Networking
   * API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started).
   * Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the
   * creation of a runtime instance and can be updated only when there are no runtime instances. For
   * example: `default`. When changing authorizedNetwork, you must reconfigure VPC peering. After
   * VPC peering with previous network is deleted, [run the following
   * command](https://cloud.google.com/sdk/gcloud/reference/services/vpc-peerings/delete): `gcloud
   * services vpc-peerings delete --network=NETWORK`, where `NETWORK` is the name of the previous
   * network. This will delete the previous Service Networking. Otherwise, you will get the
   * following error: `The resource 'projects/...-tp' is already linked to another shared VPC host
   * 'projects/...-tp`. Apigee also supports shared VPC (that is, the host network project is not
   * the same as the one that is peering with Apigee). See [Shared VPC
   * overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the
   * following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example:
   * `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee
   * hybrid.
   * @param authorizedNetwork authorizedNetwork or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setAuthorizedNetwork(java.lang.String authorizedNetwork) {
    this.authorizedNetwork = authorizedNetwork;
    return this;
  }

  /**
   * Optional. Billing type of the Apigee organization. See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing).
   * @return value or {@code null} for none
   */
  public java.lang.String getBillingType() {
    return billingType;
  }

  /**
   * Optional. Billing type of the Apigee organization. See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing).
   * @param billingType billingType or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setBillingType(java.lang.String billingType) {
    this.billingType = billingType;
    return this;
  }

  /**
   * Output only. Base64-encoded public certificate for the root CA of the Apigee organization.
   * Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
   * @see #decodeCaCertificate()
   * @return value or {@code null} for none
   */
  public java.lang.String getCaCertificate() {
    return caCertificate;
  }

  /**
   * Output only. Base64-encoded public certificate for the root CA of the Apigee organization.
   * Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
   * @see #getCaCertificate()
   * @return Base64 decoded value or {@code null} for none
   *
   * @since 1.14
   */
  public byte[] decodeCaCertificate() {
    return com.google.api.client.util.Base64.decodeBase64(caCertificate);
  }

  /**
   * Output only. Base64-encoded public certificate for the root CA of the Apigee organization.
   * Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
   * @see #encodeCaCertificate()
   * @param caCertificate caCertificate or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setCaCertificate(java.lang.String caCertificate) {
    this.caCertificate = caCertificate;
    return this;
  }

  /**
   * Output only. Base64-encoded public certificate for the root CA of the Apigee organization.
   * Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
   * @see #setCaCertificate()
   *
   * <p>
   * The value is encoded Base64 or {@code null} for none.
   * </p>
   *
   * @since 1.14
   */
  // Encodes with the URL-safe Base64 alphabet; decodeBase64 above accepts both alphabets.
  public GoogleCloudApigeeV1Organization encodeCaCertificate(byte[] caCertificate) {
    this.caCertificate = com.google.api.client.util.Base64.encodeBase64URLSafeString(caCertificate);
    return this;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting control plane data that is stored in a multi
   * region. Only used for the data residency region "US" or "EU". If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * @return value or {@code null} for none
   */
  public java.lang.String getControlPlaneEncryptionKeyName() {
    return controlPlaneEncryptionKeyName;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting control plane data that is stored in a multi
   * region. Only used for the data residency region "US" or "EU". If not specified or
   * [BillingType](#BillingType) is `EVALUATION`, a Google-Managed encryption key will be used.
   * Format: `projects/locations/keyRings/cryptoKeys`
   * @param controlPlaneEncryptionKeyName controlPlaneEncryptionKeyName or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setControlPlaneEncryptionKeyName(java.lang.String controlPlaneEncryptionKeyName) {
    this.controlPlaneEncryptionKeyName = controlPlaneEncryptionKeyName;
    return this;
  }

  /**
   * Output only. Time that the Apigee organization was created in milliseconds since epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getCreatedAt() {
    return createdAt;
  }

  /**
   * Output only. Time that the Apigee organization was created in milliseconds since epoch.
   * @param createdAt createdAt or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setCreatedAt(java.lang.Long createdAt) {
    this.createdAt = createdAt;
    return this;
  }

  /**
   * Not used by Apigee.
   * @return value or {@code null} for none
   */
  public java.lang.String getCustomerName() {
    return customerName;
  }

  /**
   * Not used by Apigee.
   * @param customerName customerName or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setCustomerName(java.lang.String customerName) {
    this.customerName = customerName;
    return this;
  }

  /**
   * Optional. Description of the Apigee organization.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * Optional. Description of the Apigee organization.
   * @param description description or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * Optional. Flag that specifies whether the VPC Peering through Private Google Access should be
   * disabled between the consumer network and Apigee. Valid only when RuntimeType is set to CLOUD.
   * Required if an authorizedNetwork on the consumer project is not provided, in which case the
   * flag should be set to true. The value must be set before the creation of any Apigee runtime
   * instance and can be updated only when there are no runtime instances. **Note:** Apigee will be
   * deprecating the vpc peering model that requires you to provide 'authorizedNetwork', by making
   * the non-peering model as the default way of provisioning Apigee organization in future. So,
   * this will be a temporary flag to enable the transition. Not supported for Apigee hybrid.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDisableVpcPeering() {
    return disableVpcPeering;
  }

  /**
   * Optional. Flag that specifies whether the VPC Peering through Private Google Access should be
   * disabled between the consumer network and Apigee. Valid only when RuntimeType is set to CLOUD.
   * Required if an authorizedNetwork on the consumer project is not provided, in which case the
   * flag should be set to true. The value must be set before the creation of any Apigee runtime
   * instance and can be updated only when there are no runtime instances. **Note:** Apigee will be
   * deprecating the vpc peering model that requires you to provide 'authorizedNetwork', by making
   * the non-peering model as the default way of provisioning Apigee organization in future. So,
   * this will be a temporary flag to enable the transition. Not supported for Apigee hybrid.
   * @param disableVpcPeering disableVpcPeering or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setDisableVpcPeering(java.lang.Boolean disableVpcPeering) {
    this.disableVpcPeering = disableVpcPeering;
    return this;
  }

  /**
   * Optional. Display name for the Apigee organization. Unused, but reserved for future use.
   * @return value or {@code null} for none
   */
  public java.lang.String getDisplayName() {
    return displayName;
  }

  /**
   * Optional. Display name for the Apigee organization. Unused, but reserved for future use.
   * @param displayName displayName or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setDisplayName(java.lang.String displayName) {
    this.displayName = displayName;
    return this;
  }

  /**
   * Output only. List of environments in the Apigee organization.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getEnvironments() {
    return environments;
  }

  /**
   * Output only. List of environments in the Apigee organization.
   * @param environments environments or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setEnvironments(java.util.List<java.lang.String> environments) {
    this.environments = environments;
    return this;
  }

  /**
   * Output only. Time that the Apigee organization is scheduled for deletion.
   * @return value or {@code null} for none
   */
  public java.lang.Long getExpiresAt() {
    return expiresAt;
  }

  /**
   * Output only. Time that the Apigee organization is scheduled for deletion.
   * @param expiresAt expiresAt or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setExpiresAt(java.lang.Long expiresAt) {
    this.expiresAt = expiresAt;
    return this;
  }

  /**
   * Output only. Time that the Apigee organization was last modified in milliseconds since epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getLastModifiedAt() {
    return lastModifiedAt;
  }

  /**
   * Output only. Time that the Apigee organization was last modified in milliseconds since epoch.
   * @param lastModifiedAt lastModifiedAt or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setLastModifiedAt(java.lang.Long lastModifiedAt) {
    this.lastModifiedAt = lastModifiedAt;
    return this;
  }

  /**
   * Output only. Name of the Apigee organization.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Output only. Name of the Apigee organization.
   * @param name name or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Optional. Flag that specifies if internet egress is restricted for VPC Service Controls. Valid
   * only when runtime_type is `CLOUD` and disable_vpc_peering is `true`.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getNetworkEgressRestricted() {
    return networkEgressRestricted;
  }

  /**
   * Optional. Flag that specifies if internet egress is restricted for VPC Service Controls. Valid
   * only when runtime_type is `CLOUD` and disable_vpc_peering is `true`.
   * @param networkEgressRestricted networkEgressRestricted or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setNetworkEgressRestricted(java.lang.Boolean networkEgressRestricted) {
    this.networkEgressRestricted = networkEgressRestricted;
    return this;
  }

  /**
   * Optional. Configuration for the Portals settings.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getPortalDisabled() {
    return portalDisabled;
  }

  /**
   * Optional. Configuration for the Portals settings.
   * @param portalDisabled portalDisabled or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setPortalDisabled(java.lang.Boolean portalDisabled) {
    this.portalDisabled = portalDisabled;
    return this;
  }

  /**
   * Output only. Project ID associated with the Apigee organization.
   * @return value or {@code null} for none
   */
  public java.lang.String getProjectId() {
    return projectId;
  }

  /**
   * Output only. Project ID associated with the Apigee organization.
   * @param projectId projectId or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setProjectId(java.lang.String projectId) {
    this.projectId = projectId;
    return this;
  }

  /**
   * Optional. Properties defined in the Apigee organization profile.
   * @return value or {@code null} for none
   */
  public GoogleCloudApigeeV1Properties getProperties() {
    return properties;
  }

  /**
   * Optional. Properties defined in the Apigee organization profile.
   * @param properties properties or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setProperties(GoogleCloudApigeeV1Properties properties) {
    this.properties = properties;
    return this;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting the data that is stored and replicated across
   * runtime instances. Update is not allowed after the organization is created. If not specified or
   * [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For
   * example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for
   * Apigee hybrid.
   * @return value or {@code null} for none
   */
  public java.lang.String getRuntimeDatabaseEncryptionKeyName() {
    return runtimeDatabaseEncryptionKeyName;
  }

  /**
   * Optional. Cloud KMS key name used for encrypting the data that is stored and replicated across
   * runtime instances. Update is not allowed after the organization is created. If not specified or
   * [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For
   * example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for
   * Apigee hybrid.
   * @param runtimeDatabaseEncryptionKeyName runtimeDatabaseEncryptionKeyName or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setRuntimeDatabaseEncryptionKeyName(java.lang.String runtimeDatabaseEncryptionKeyName) {
    this.runtimeDatabaseEncryptionKeyName = runtimeDatabaseEncryptionKeyName;
    return this;
  }

  /**
   * Required. Runtime type of the Apigee organization based on the Apigee subscription purchased.
   * @return value or {@code null} for none
   */
  public java.lang.String getRuntimeType() {
    return runtimeType;
  }

  /**
   * Required. Runtime type of the Apigee organization based on the Apigee subscription purchased.
   * @param runtimeType runtimeType or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setRuntimeType(java.lang.String runtimeType) {
    this.runtimeType = runtimeType;
    return this;
  }

  /**
   * Output only. State of the organization. Values other than ACTIVE means the resource is not
   * ready to use.
   * @return value or {@code null} for none
   */
  public java.lang.String getState() {
    return state;
  }

  /**
   * Output only. State of the organization. Values other than ACTIVE means the resource is not
   * ready to use.
   * @param state state or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setState(java.lang.String state) {
    this.state = state;
    return this;
  }

  /**
   * Output only. Subscription plan that the customer has purchased. Output only.
   * @return value or {@code null} for none
   */
  public java.lang.String getSubscriptionPlan() {
    return subscriptionPlan;
  }

  /**
   * Output only. Subscription plan that the customer has purchased. Output only.
   * @param subscriptionPlan subscriptionPlan or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setSubscriptionPlan(java.lang.String subscriptionPlan) {
    this.subscriptionPlan = subscriptionPlan;
    return this;
  }

  /**
   * Output only. DEPRECATED: This will eventually be replaced by BillingType. Subscription type of
   * the Apigee organization. Valid values include trial (free, limited, and for evaluation purposes
   * only) or paid (full subscription has been purchased). See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing/).
   * @return value or {@code null} for none
   */
  public java.lang.String getSubscriptionType() {
    return subscriptionType;
  }

  /**
   * Output only. DEPRECATED: This will eventually be replaced by BillingType. Subscription type of
   * the Apigee organization. Valid values include trial (free, limited, and for evaluation purposes
   * only) or paid (full subscription has been purchased). See [Apigee
   * pricing](https://cloud.google.com/apigee/pricing/).
   * @param subscriptionType subscriptionType or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setSubscriptionType(java.lang.String subscriptionType) {
    this.subscriptionType = subscriptionType;
    return this;
  }

  /**
   * Not used by Apigee.
   * @return value or {@code null} for none
   */
  public java.lang.String getType() {
    return type;
  }

  /**
   * Not used by Apigee.
   * @param type type or {@code null} for none
   */
  public GoogleCloudApigeeV1Organization setType(java.lang.String type) {
    this.type = type;
    return this;
  }

  // Covariant overrides so fluent chains keep the concrete type.
  @Override
  public GoogleCloudApigeeV1Organization set(String fieldName, Object value) {
    return (GoogleCloudApigeeV1Organization) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudApigeeV1Organization clone() {
    return (GoogleCloudApigeeV1Organization) super.clone();
  }

}
googleapis/java-genai
35,167
src/main/java/com/google/genai/TokensConverters.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Auto-generated code. Do not edit. package com.google.genai; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; final class TokensConverters { private final ApiClient apiClient; public TokensConverters(ApiClient apiClient) { this.apiClient = apiClient; } @ExcludeFromGeneratedCoverageReport ObjectNode prebuiltVoiceConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"voiceName"}) != null) { Common.setValueByPath( toObject, new String[] {"voiceName"}, Common.getValueByPath(fromObject, new String[] {"voiceName"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode voiceConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"prebuiltVoiceConfig"}) != null) { Common.setValueByPath( toObject, new String[] {"prebuiltVoiceConfig"}, prebuiltVoiceConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"prebuiltVoiceConfig"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode speakerVoiceConfigToMldev(JsonNode 
fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"speaker"}) != null) { Common.setValueByPath( toObject, new String[] {"speaker"}, Common.getValueByPath(fromObject, new String[] {"speaker"})); } if (Common.getValueByPath(fromObject, new String[] {"voiceConfig"}) != null) { Common.setValueByPath( toObject, new String[] {"voiceConfig"}, voiceConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"voiceConfig"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode multiSpeakerVoiceConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"speakerVoiceConfigs"}) != null) { ArrayNode keyArray = (ArrayNode) Common.getValueByPath(fromObject, new String[] {"speakerVoiceConfigs"}); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode result = objectMapper.createArrayNode(); for (JsonNode item : keyArray) { result.add(speakerVoiceConfigToMldev(JsonSerializable.toJsonNode(item), toObject)); } Common.setValueByPath(toObject, new String[] {"speakerVoiceConfigs"}, result); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode speechConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"voiceConfig"}) != null) { Common.setValueByPath( toObject, new String[] {"voiceConfig"}, voiceConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"voiceConfig"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"multiSpeakerVoiceConfig"}) != null) { Common.setValueByPath( toObject, new String[] {"multiSpeakerVoiceConfig"}, multiSpeakerVoiceConfigToMldev( JsonSerializable.toJsonNode( 
Common.getValueByPath(fromObject, new String[] {"multiSpeakerVoiceConfig"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"languageCode"}) != null) { Common.setValueByPath( toObject, new String[] {"languageCode"}, Common.getValueByPath(fromObject, new String[] {"languageCode"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode videoMetadataToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"fps"}) != null) { Common.setValueByPath( toObject, new String[] {"fps"}, Common.getValueByPath(fromObject, new String[] {"fps"})); } if (Common.getValueByPath(fromObject, new String[] {"endOffset"}) != null) { Common.setValueByPath( toObject, new String[] {"endOffset"}, Common.getValueByPath(fromObject, new String[] {"endOffset"})); } if (Common.getValueByPath(fromObject, new String[] {"startOffset"}) != null) { Common.setValueByPath( toObject, new String[] {"startOffset"}, Common.getValueByPath(fromObject, new String[] {"startOffset"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode blobToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"displayName"}))) { throw new IllegalArgumentException("displayName parameter is not supported in Gemini API."); } if (Common.getValueByPath(fromObject, new String[] {"data"}) != null) { Common.setValueByPath( toObject, new String[] {"data"}, Common.getValueByPath(fromObject, new String[] {"data"})); } if (Common.getValueByPath(fromObject, new String[] {"mimeType"}) != null) { Common.setValueByPath( toObject, new String[] {"mimeType"}, Common.getValueByPath(fromObject, new String[] {"mimeType"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode fileDataToMldev(JsonNode fromObject, ObjectNode 
parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"displayName"}))) { throw new IllegalArgumentException("displayName parameter is not supported in Gemini API."); } if (Common.getValueByPath(fromObject, new String[] {"fileUri"}) != null) { Common.setValueByPath( toObject, new String[] {"fileUri"}, Common.getValueByPath(fromObject, new String[] {"fileUri"})); } if (Common.getValueByPath(fromObject, new String[] {"mimeType"}) != null) { Common.setValueByPath( toObject, new String[] {"mimeType"}, Common.getValueByPath(fromObject, new String[] {"mimeType"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode partToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"videoMetadata"}) != null) { Common.setValueByPath( toObject, new String[] {"videoMetadata"}, videoMetadataToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"videoMetadata"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"thought"}) != null) { Common.setValueByPath( toObject, new String[] {"thought"}, Common.getValueByPath(fromObject, new String[] {"thought"})); } if (Common.getValueByPath(fromObject, new String[] {"inlineData"}) != null) { Common.setValueByPath( toObject, new String[] {"inlineData"}, blobToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"inlineData"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"fileData"}) != null) { Common.setValueByPath( toObject, new String[] {"fileData"}, fileDataToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"fileData"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"thoughtSignature"}) != null) { Common.setValueByPath( toObject, new String[] 
{"thoughtSignature"}, Common.getValueByPath(fromObject, new String[] {"thoughtSignature"})); } if (Common.getValueByPath(fromObject, new String[] {"codeExecutionResult"}) != null) { Common.setValueByPath( toObject, new String[] {"codeExecutionResult"}, Common.getValueByPath(fromObject, new String[] {"codeExecutionResult"})); } if (Common.getValueByPath(fromObject, new String[] {"executableCode"}) != null) { Common.setValueByPath( toObject, new String[] {"executableCode"}, Common.getValueByPath(fromObject, new String[] {"executableCode"})); } if (Common.getValueByPath(fromObject, new String[] {"functionCall"}) != null) { Common.setValueByPath( toObject, new String[] {"functionCall"}, Common.getValueByPath(fromObject, new String[] {"functionCall"})); } if (Common.getValueByPath(fromObject, new String[] {"functionResponse"}) != null) { Common.setValueByPath( toObject, new String[] {"functionResponse"}, Common.getValueByPath(fromObject, new String[] {"functionResponse"})); } if (Common.getValueByPath(fromObject, new String[] {"text"}) != null) { Common.setValueByPath( toObject, new String[] {"text"}, Common.getValueByPath(fromObject, new String[] {"text"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode contentToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"parts"}) != null) { ArrayNode keyArray = (ArrayNode) Common.getValueByPath(fromObject, new String[] {"parts"}); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode result = objectMapper.createArrayNode(); for (JsonNode item : keyArray) { result.add(partToMldev(JsonSerializable.toJsonNode(item), toObject)); } Common.setValueByPath(toObject, new String[] {"parts"}, result); } if (Common.getValueByPath(fromObject, new String[] {"role"}) != null) { Common.setValueByPath( toObject, new String[] {"role"}, Common.getValueByPath(fromObject, new String[] {"role"})); 
} return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode functionDeclarationToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"behavior"}) != null) { Common.setValueByPath( toObject, new String[] {"behavior"}, Common.getValueByPath(fromObject, new String[] {"behavior"})); } if (Common.getValueByPath(fromObject, new String[] {"description"}) != null) { Common.setValueByPath( toObject, new String[] {"description"}, Common.getValueByPath(fromObject, new String[] {"description"})); } if (Common.getValueByPath(fromObject, new String[] {"name"}) != null) { Common.setValueByPath( toObject, new String[] {"name"}, Common.getValueByPath(fromObject, new String[] {"name"})); } if (Common.getValueByPath(fromObject, new String[] {"parameters"}) != null) { Common.setValueByPath( toObject, new String[] {"parameters"}, Common.getValueByPath(fromObject, new String[] {"parameters"})); } if (Common.getValueByPath(fromObject, new String[] {"parametersJsonSchema"}) != null) { Common.setValueByPath( toObject, new String[] {"parametersJsonSchema"}, Common.getValueByPath(fromObject, new String[] {"parametersJsonSchema"})); } if (Common.getValueByPath(fromObject, new String[] {"response"}) != null) { Common.setValueByPath( toObject, new String[] {"response"}, Common.getValueByPath(fromObject, new String[] {"response"})); } if (Common.getValueByPath(fromObject, new String[] {"responseJsonSchema"}) != null) { Common.setValueByPath( toObject, new String[] {"responseJsonSchema"}, Common.getValueByPath(fromObject, new String[] {"responseJsonSchema"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode intervalToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"startTime"}) != null) { Common.setValueByPath( 
toObject, new String[] {"startTime"}, Common.getValueByPath(fromObject, new String[] {"startTime"})); } if (Common.getValueByPath(fromObject, new String[] {"endTime"}) != null) { Common.setValueByPath( toObject, new String[] {"endTime"}, Common.getValueByPath(fromObject, new String[] {"endTime"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode googleSearchToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"timeRangeFilter"}) != null) { Common.setValueByPath( toObject, new String[] {"timeRangeFilter"}, intervalToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"timeRangeFilter"})), toObject)); } if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"excludeDomains"}))) { throw new IllegalArgumentException( "excludeDomains parameter is not supported in Gemini API."); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode dynamicRetrievalConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"mode"}) != null) { Common.setValueByPath( toObject, new String[] {"mode"}, Common.getValueByPath(fromObject, new String[] {"mode"})); } if (Common.getValueByPath(fromObject, new String[] {"dynamicThreshold"}) != null) { Common.setValueByPath( toObject, new String[] {"dynamicThreshold"}, Common.getValueByPath(fromObject, new String[] {"dynamicThreshold"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode googleSearchRetrievalToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"dynamicRetrievalConfig"}) != null) { Common.setValueByPath( toObject, new String[] {"dynamicRetrievalConfig"}, 
dynamicRetrievalConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"dynamicRetrievalConfig"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode urlContextToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode toolComputerUseToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"environment"}) != null) { Common.setValueByPath( toObject, new String[] {"environment"}, Common.getValueByPath(fromObject, new String[] {"environment"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode toolToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"functionDeclarations"}) != null) { ArrayNode keyArray = (ArrayNode) Common.getValueByPath(fromObject, new String[] {"functionDeclarations"}); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode result = objectMapper.createArrayNode(); for (JsonNode item : keyArray) { result.add(functionDeclarationToMldev(JsonSerializable.toJsonNode(item), toObject)); } Common.setValueByPath(toObject, new String[] {"functionDeclarations"}, result); } if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"retrieval"}))) { throw new IllegalArgumentException("retrieval parameter is not supported in Gemini API."); } if (Common.getValueByPath(fromObject, new String[] {"googleSearch"}) != null) { Common.setValueByPath( toObject, new String[] {"googleSearch"}, googleSearchToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"googleSearch"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"googleSearchRetrieval"}) 
!= null) { Common.setValueByPath( toObject, new String[] {"googleSearchRetrieval"}, googleSearchRetrievalToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"googleSearchRetrieval"})), toObject)); } if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"enterpriseWebSearch"}))) { throw new IllegalArgumentException( "enterpriseWebSearch parameter is not supported in Gemini API."); } if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"googleMaps"}))) { throw new IllegalArgumentException("googleMaps parameter is not supported in Gemini API."); } if (Common.getValueByPath(fromObject, new String[] {"urlContext"}) != null) { Common.setValueByPath( toObject, new String[] {"urlContext"}, urlContextToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"urlContext"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"computerUse"}) != null) { Common.setValueByPath( toObject, new String[] {"computerUse"}, toolComputerUseToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"computerUse"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"codeExecution"}) != null) { Common.setValueByPath( toObject, new String[] {"codeExecution"}, Common.getValueByPath(fromObject, new String[] {"codeExecution"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode sessionResumptionConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"handle"}) != null) { Common.setValueByPath( toObject, new String[] {"handle"}, Common.getValueByPath(fromObject, new String[] {"handle"})); } if (!Common.isZero(Common.getValueByPath(fromObject, new String[] {"transparent"}))) { throw new IllegalArgumentException("transparent parameter is not supported in Gemini API."); } return toObject; } 
@ExcludeFromGeneratedCoverageReport ObjectNode audioTranscriptionConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode automaticActivityDetectionToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"disabled"}) != null) { Common.setValueByPath( toObject, new String[] {"disabled"}, Common.getValueByPath(fromObject, new String[] {"disabled"})); } if (Common.getValueByPath(fromObject, new String[] {"startOfSpeechSensitivity"}) != null) { Common.setValueByPath( toObject, new String[] {"startOfSpeechSensitivity"}, Common.getValueByPath(fromObject, new String[] {"startOfSpeechSensitivity"})); } if (Common.getValueByPath(fromObject, new String[] {"endOfSpeechSensitivity"}) != null) { Common.setValueByPath( toObject, new String[] {"endOfSpeechSensitivity"}, Common.getValueByPath(fromObject, new String[] {"endOfSpeechSensitivity"})); } if (Common.getValueByPath(fromObject, new String[] {"prefixPaddingMs"}) != null) { Common.setValueByPath( toObject, new String[] {"prefixPaddingMs"}, Common.getValueByPath(fromObject, new String[] {"prefixPaddingMs"})); } if (Common.getValueByPath(fromObject, new String[] {"silenceDurationMs"}) != null) { Common.setValueByPath( toObject, new String[] {"silenceDurationMs"}, Common.getValueByPath(fromObject, new String[] {"silenceDurationMs"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode realtimeInputConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"automaticActivityDetection"}) != null) { Common.setValueByPath( toObject, new String[] {"automaticActivityDetection"}, automaticActivityDetectionToMldev( 
JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"automaticActivityDetection"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"activityHandling"}) != null) { Common.setValueByPath( toObject, new String[] {"activityHandling"}, Common.getValueByPath(fromObject, new String[] {"activityHandling"})); } if (Common.getValueByPath(fromObject, new String[] {"turnCoverage"}) != null) { Common.setValueByPath( toObject, new String[] {"turnCoverage"}, Common.getValueByPath(fromObject, new String[] {"turnCoverage"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode slidingWindowToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"targetTokens"}) != null) { Common.setValueByPath( toObject, new String[] {"targetTokens"}, Common.getValueByPath(fromObject, new String[] {"targetTokens"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode contextWindowCompressionConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"triggerTokens"}) != null) { Common.setValueByPath( toObject, new String[] {"triggerTokens"}, Common.getValueByPath(fromObject, new String[] {"triggerTokens"})); } if (Common.getValueByPath(fromObject, new String[] {"slidingWindow"}) != null) { Common.setValueByPath( toObject, new String[] {"slidingWindow"}, slidingWindowToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"slidingWindow"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode proactivityConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"proactiveAudio"}) != null) { 
Common.setValueByPath( toObject, new String[] {"proactiveAudio"}, Common.getValueByPath(fromObject, new String[] {"proactiveAudio"})); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode liveConnectConfigToMldev(JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"responseModalities"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "responseModalities"}, Common.getValueByPath(fromObject, new String[] {"responseModalities"})); } if (Common.getValueByPath(fromObject, new String[] {"temperature"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "temperature"}, Common.getValueByPath(fromObject, new String[] {"temperature"})); } if (Common.getValueByPath(fromObject, new String[] {"topP"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "topP"}, Common.getValueByPath(fromObject, new String[] {"topP"})); } if (Common.getValueByPath(fromObject, new String[] {"topK"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "topK"}, Common.getValueByPath(fromObject, new String[] {"topK"})); } if (Common.getValueByPath(fromObject, new String[] {"maxOutputTokens"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "maxOutputTokens"}, Common.getValueByPath(fromObject, new String[] {"maxOutputTokens"})); } if (Common.getValueByPath(fromObject, new String[] {"mediaResolution"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "mediaResolution"}, Common.getValueByPath(fromObject, new String[] {"mediaResolution"})); } if (Common.getValueByPath(fromObject, new String[] {"seed"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "seed"}, 
Common.getValueByPath(fromObject, new String[] {"seed"})); } if (Common.getValueByPath(fromObject, new String[] {"speechConfig"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "speechConfig"}, speechConfigToMldev( JsonSerializable.toJsonNode( Transformers.tLiveSpeechConfig( Common.getValueByPath(fromObject, new String[] {"speechConfig"}))), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"enableAffectiveDialog"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "generationConfig", "enableAffectiveDialog"}, Common.getValueByPath(fromObject, new String[] {"enableAffectiveDialog"})); } if (Common.getValueByPath(fromObject, new String[] {"systemInstruction"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "systemInstruction"}, contentToMldev( JsonSerializable.toJsonNode( Transformers.tContent( Common.getValueByPath(fromObject, new String[] {"systemInstruction"}))), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"tools"}) != null) { ArrayNode keyArray = (ArrayNode) Transformers.tTools(Common.getValueByPath(fromObject, new String[] {"tools"})); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode result = objectMapper.createArrayNode(); for (JsonNode item : keyArray) { result.add(toolToMldev(JsonSerializable.toJsonNode(Transformers.tTool(item)), toObject)); } Common.setValueByPath(parentObject, new String[] {"setup", "tools"}, result); } if (Common.getValueByPath(fromObject, new String[] {"sessionResumption"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "sessionResumption"}, sessionResumptionConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"sessionResumption"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"inputAudioTranscription"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "inputAudioTranscription"}, 
audioTranscriptionConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"inputAudioTranscription"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"outputAudioTranscription"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "outputAudioTranscription"}, audioTranscriptionConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"outputAudioTranscription"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"realtimeInputConfig"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "realtimeInputConfig"}, realtimeInputConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"realtimeInputConfig"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"contextWindowCompression"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "contextWindowCompression"}, contextWindowCompressionConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"contextWindowCompression"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"proactivity"}) != null) { Common.setValueByPath( parentObject, new String[] {"setup", "proactivity"}, proactivityConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"proactivity"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode liveConnectConstraintsToMldev( ApiClient apiClient, JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"model"}) != null) { Common.setValueByPath( toObject, new String[] {"setup", "model"}, Transformers.tModel( this.apiClient, Common.getValueByPath(fromObject, new String[] {"model"}))); } if (Common.getValueByPath(fromObject, new String[] {"config"}) != null) { 
Common.setValueByPath( toObject, new String[] {"config"}, liveConnectConfigToMldev( JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"config"})), toObject)); } return toObject; } @ExcludeFromGeneratedCoverageReport ObjectNode createAuthTokenConfigToMldev( ApiClient apiClient, JsonNode fromObject, ObjectNode parentObject) { ObjectNode toObject = JsonSerializable.objectMapper.createObjectNode(); if (Common.getValueByPath(fromObject, new String[] {"expireTime"}) != null) { Common.setValueByPath( parentObject, new String[] {"expireTime"}, Common.getValueByPath(fromObject, new String[] {"expireTime"})); } if (Common.getValueByPath(fromObject, new String[] {"newSessionExpireTime"}) != null) { Common.setValueByPath( parentObject, new String[] {"newSessionExpireTime"}, Common.getValueByPath(fromObject, new String[] {"newSessionExpireTime"})); } if (Common.getValueByPath(fromObject, new String[] {"uses"}) != null) { Common.setValueByPath( parentObject, new String[] {"uses"}, Common.getValueByPath(fromObject, new String[] {"uses"})); } if (Common.getValueByPath(fromObject, new String[] {"liveConnectConstraints"}) != null) { Common.setValueByPath( parentObject, new String[] {"bidiGenerateContentSetup"}, liveConnectConstraintsToMldev( apiClient, JsonSerializable.toJsonNode( Common.getValueByPath(fromObject, new String[] {"liveConnectConstraints"})), toObject)); } if (Common.getValueByPath(fromObject, new String[] {"lockAdditionalFields"}) != null) { Common.setValueByPath( parentObject, new String[] {"fieldMask"}, Common.getValueByPath(fromObject, new String[] {"lockAdditionalFields"})); } return toObject; } }
apache/cxf
35,437
rt/transports/http-netty/netty-server/src/test/java/org/apache/cxf/transport/http/netty/server/NettyHttpDestinationTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.transport.http.netty.server; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.xml.namespace.QName; import jakarta.servlet.ServletInputStream; import jakarta.servlet.ServletOutputStream; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import jakarta.xml.bind.JAXBElement; import org.apache.cxf.Bus; import org.apache.cxf.BusException; import org.apache.cxf.BusFactory; import org.apache.cxf.bus.extension.ExtensionManagerBus; import org.apache.cxf.common.util.StringUtils; import org.apache.cxf.configuration.security.AuthorizationPolicy; import org.apache.cxf.continuations.SuspendedInvocationException; import org.apache.cxf.endpoint.EndpointResolverRegistry; import org.apache.cxf.helpers.CastUtils; import org.apache.cxf.io.AbstractWrappedOutputStream; import org.apache.cxf.message.ExchangeImpl; import org.apache.cxf.message.Message; import 
org.apache.cxf.message.MessageImpl; import org.apache.cxf.policy.PolicyDataEngine; import org.apache.cxf.security.transport.TLSSessionInfo; import org.apache.cxf.service.model.EndpointInfo; import org.apache.cxf.service.model.ServiceInfo; import org.apache.cxf.transport.Conduit; import org.apache.cxf.transport.ConduitInitiator; import org.apache.cxf.transport.ConduitInitiatorManager; import org.apache.cxf.transport.Destination; import org.apache.cxf.transport.MessageObserver; import org.apache.cxf.transport.http.AbstractHTTPDestination; import org.apache.cxf.transport.http.ContinuationProviderFactory; import org.apache.cxf.transport.http.DestinationRegistry; import org.apache.cxf.transport.http.HTTPTransportFactory; import org.apache.cxf.transport.http.auth.DefaultBasicAuthSupplier; import org.apache.cxf.transport.http.netty.server.util.Utils; import org.apache.cxf.transports.http.configuration.HTTPServerPolicy; import org.apache.cxf.ws.addressing.AddressingProperties; import org.apache.cxf.ws.addressing.EndpointReferenceType; import org.apache.cxf.ws.addressing.EndpointReferenceUtils; import org.apache.cxf.ws.addressing.JAXWSAConstants; import org.junit.After; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isA; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class NettyHttpDestinationTest { protected static final String AUTH_HEADER = "Authorization"; protected static final String USER = "copernicus"; protected static final String PASSWD = "epicycles"; protected static final String BASIC_AUTH = 
DefaultBasicAuthSupplier.getBasicAuthHeader(USER, PASSWD);
    // Constants used by the mock request/response plumbing below.
    private static final String NOWHERE = "http://nada.nothing.nowhere.null/";
    private static final String PAYLOAD = "message payload";
    private static final String CHALLENGE_HEADER = "WWW-Authenticate";
    private static final String BASIC_CHALLENGE = "Basic realm=terra";
    private static final String DIGEST_CHALLENGE = "Digest realm=luna";
    private static final String CUSTOM_CHALLENGE = "Custom realm=sol";

    // Per-test fixtures; all of these are reset to null in tearDown().
    private Bus bus;
    private Bus threadDefaultBus;
    private Conduit decoupledBackChannel;
    private EndpointInfo endpointInfo;
    private EndpointReferenceType address;
    private NettyHttpServerEngine engine;
    private HTTPServerPolicy policy;
    private NettyHttpDestination destination;
    private HttpServletRequest request;
    private HttpServletResponse response;
    private Message inMessage;
    private Message outMessage;
    private MessageObserver observer;
    private ServletInputStream is;
    private ServletOutputStream os;
    private HTTPTransportFactory transportFactory;

    /**
     * Replaces the real engine in the Netty HTTP destination with a mock:
     * the constructor stores the supplied mock and {@link #retrieveEngine()}
     * is overridden to a no-op so the mock is never replaced.
     */
    private class MockJettyHTTPDestination extends NettyHttpDestination {
        MockJettyHTTPDestination(Bus bus, DestinationRegistry registry,
                                 EndpointInfo endpointInfo,
                                 NettyHttpServerEngineFactory serverEngineFactory,
                                 NettyHttpServerEngine easyMockEngine) throws IOException {
            super(bus, registry, endpointInfo, serverEngineFactory);
            engine = easyMockEngine;
        }

        @Override
        public void retrieveEngine() {
            // Leave engine alone.
        }
    }

    /** Shuts the bus down and nulls every fixture so state cannot leak between tests. */
    @After
    public void tearDown() {
        if (bus != null) {
            bus.shutdown(true);
        }
        bus = null;
        transportFactory = null;
        decoupledBackChannel = null;
        address = null;
        engine = null;
        request = null;
        response = null;
        inMessage = null;
        outMessage = null;
        is = null;
        os = null;
        destination = null;
        BusFactory.setDefaultBus(null);
    }

    // Verifies the destination's EPR exposes the expected address, service name and port name.
    @Test
    public void testGetAddress() throws Exception {
        destination = setUpDestination();
        EndpointReferenceType ref = destination.getAddress();
        assertNotNull("unexpected null address", ref);
        assertEquals("unexpected address",
                     EndpointReferenceUtils.getAddress(ref),
                     StringUtils.addDefaultPortIfMissing(EndpointReferenceUtils.getAddress(address)));
        assertEquals("unexpected service name local part",
                     EndpointReferenceUtils.getServiceName(ref, bus).getLocalPart(),
                     "Service");
        assertEquals("unexpected portName",
                     EndpointReferenceUtils.getPortName(ref),
                     "Port");
    }

    // An endpoint created with an empty address must get a random (> 0) port allocated.
    @Test
    public void testRandomPortAllocation() throws Exception {
        bus = BusFactory.getDefaultBus();
        transportFactory = new HTTPTransportFactory();
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        EndpointInfo ei = new EndpointInfo(serviceInfo, "");
        ei.setName(new QName("bla", "Port"));
        Destination d1 = transportFactory.getDestination(ei, bus);
        URL url = new URL(d1.getAddress().getAddress().getValue());
        assertTrue("No random port has been allocated", url.getPort() > 0);
    }

    // doService must rethrow the original cause wrapped in a SuspendedInvocationException,
    // not swallow it or wrap it again.
    @Test
    public void testSuspendedException() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        final RuntimeException ex = new RuntimeException();
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                throw new SuspendedInvocationException(ex);
            }
        };
        destination.setMessageObserver(observer);
        try {
            destination.doService(request, response);
            fail("Suspended invocation swallowed");
        } catch (RuntimeException runtimeEx) {
            assertSame("Original exception is not preserved", ex, runtimeEx);
        }
    }

    // retrieveFromContinuation must return null for this transport (continuations ignored).
    @Test
    public void testContinuationsIgnored() throws Exception {
        HttpServletRequest httpRequest = mock(HttpServletRequest.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        EndpointInfo ei = new EndpointInfo(serviceInfo, "");
        ei.setName(new QName("bla", "Port"));
        // Just create a fake engine
        final NettyHttpServerEngine httpEngine = new NettyHttpServerEngine("localhost", 8080);
        //httpEngine.setContinuationsEnabled(false);
        NettyHttpServerEngineFactory factory = new NettyHttpServerEngineFactory() {
            @Override
            public NettyHttpServerEngine retrieveNettyHttpServerEngine(int port) {
                return httpEngine;
            }
        };
        transportFactory = new HTTPTransportFactory();
        bus = BusFactory.getDefaultBus();
        bus.setExtension(factory, NettyHttpServerEngineFactory.class);
        TestJettyDestination testDestination =
            new TestJettyDestination(bus, transportFactory.getRegistry(), ei, factory);
        testDestination.finalizeConfig();
        Message mi = testDestination.retrieveFromContinuation(httpRequest);
        assertNull("Continuations must be ignored", mi);
    }

    // The registry must cache destinations per endpoint, and create a new one after shutdown.
    @Test
    public void testGetMultiple() throws Exception {
        transportFactory = new HTTPTransportFactory();
        bus = BusFactory.getDefaultBus();
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        EndpointInfo ei = new EndpointInfo(serviceInfo, "");
        ei.setName(new QName("bla", "Port"));
        ei.setAddress("http://foo");
        Destination d1 = transportFactory.getDestination(ei, bus);
        Destination d2 = transportFactory.getDestination(ei, bus);
        // Second get should not generate a new destination. It should just retrieve the existing one
        assertEquals(d1, d2);
        d2.shutdown();
        Destination d3 = transportFactory.getDestination(ei, bus);
        // Now a new destination should have been created
        assertNotSame(d1, d3);
    }

    // Clearing the observer triggers servant removal (stubbed in setUpRemoveServant()).
    @Test
    public void testRemoveServant() throws Exception {
        destination = setUpDestination();
        setUpRemoveServant();
        destination.setMessageObserver(null);
    }

    // With a redirect URL configured in the policy, doService sends a redirect
    // (the expectation is set up inside setUpDoService(true)).
    @Test
    public void testDoServiceRedirectURL() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(true);
        destination.doService(request, response);
    }

    // doService must dispatch the in-message and restore the thread-default bus afterwards.
    @Test
    public void testDoService() throws Exception {
        Bus defaultBus = new ExtensionManagerBus();
        assertSame("Default thread bus has not been set",
                   defaultBus, BusFactory.getThreadDefaultBus());
        destination = setUpDestination(false, false);
        setUpDoService(false);
        assertSame("Default thread bus has been unexpectedly reset",
                   defaultBus, BusFactory.getThreadDefaultBus());
        destination.doService(request, response);
        verifyDoService();
        assertSame("Default thread bus has not been reset",
                   defaultBus, BusFactory.getThreadDefaultBus());
    }

    // A GET request must surface method, path and query string on the in-message.
    @Test
    public void testDoServiceWithHttpGET() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, false, false, "GET", "?customerId=abc&cutomerAdd=def", 200);
        destination.doService(request, response);
        assertNotNull("unexpected null message", inMessage);
        assertEquals("unexpected method", inMessage.get(Message.HTTP_REQUEST_METHOD), "GET");
        assertEquals("unexpected path", inMessage.get(Message.PATH_INFO), "/bar/foo");
        assertEquals("unexpected query",
                     inMessage.get(Message.QUERY_STRING), "?customerId=abc&cutomerAdd=def");
    }

    // The back channel for an in-message must target the WS-Addressing anonymous address.
    @Test
    public void testGetAnonBackChannel() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        assertNotNull("expected back channel", backChannel);
        assertEquals("unexpected target",
                     EndpointReferenceUtils.ANONYMOUS_ADDRESS,
                     backChannel.getTarget().getAddress().getValue());
    }

    // Happy-path send on the back channel with a 200 status.
    @Test
    public void testGetBackChannelSend() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 200);
    }

    // Fault path: the same send flow with a 500 status.
    @Test
    public void testGetBackChannelSendFault() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500);
    }

    // One-way variant: verifyBackChannelSend(..., oneway=true) expects no HTTP response on the message.
    @Test
    public void testGetBackChannelSendOneway() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500, true);
    }

    // Decoupled flow: a 202 partial response goes out first, then the full response is prepared.
    @Test
    public void testGetBackChannelSendDecoupled() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, true, 202);
        destination.doService(request, response);
        setUpInMessage();
        Message partialResponse = setUpOutMessage();
        partialResponse.put(Message.PARTIAL_RESPONSE_MESSAGE, Boolean.TRUE);
        Conduit partialBackChannel = destination.getBackChannel(inMessage);
        partialBackChannel.prepare(partialResponse);
        verifyBackChannelSend(partialBackChannel, partialResponse, 202);
        outMessage = setUpOutMessage();
        Conduit fullBackChannel = destination.getBackChannel(inMessage);
        fullBackChannel.prepare(outMessage);
    }

    // An HTTPServerPolicy attached to the EndpointInfo as an extensor must be picked up as the server policy.
    @Test
    public void testServerPolicyInServiceModel() throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        bus = new ExtensionManagerBus();
        transportFactory = new HTTPTransportFactory();
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        endpointInfo.addExtensor(policy);
        engine = mock(NettyHttpServerEngine.class);
        endpointInfo.setAddress(NOWHERE + "bar/foo");
        NettyHttpDestination dest = new MockJettyHTTPDestination(
            bus, transportFactory.getRegistry(), endpointInfo, null, engine);
        assertEquals(policy, dest.getServer());
    }

    // Default multiplexing: the id is carried as a JAXBElement reference parameter.
    @Test
    public void testMultiplexGetAddressWithId() throws Exception {
        destination = setUpDestination();
        final String id = "ID2";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        assertNotNull(refWithId);
        assertNotNull(refWithId.getReferenceParameters());
        assertNotNull(refWithId.getReferenceParameters().getAny());
        assertTrue("it is an element",
                   refWithId.getReferenceParameters().getAny().get(0) instanceof JAXBElement);
        JAXBElement<?> el = (JAXBElement<?>) refWithId.getReferenceParameters().getAny().get(0);
        assertEquals("match our id", el.getValue(), id);
    }

    // multiplexWithAddress=true: the id is embedded in the address, not in reference parameters.
    @Test
    public void testMultiplexGetAddressWithIdForAddress() throws Exception {
        destination = setUpDestination();
        destination.setMultiplexWithAddress(true);
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        assertNotNull(refWithId);
        assertNull(refWithId.getReferenceParameters());
        assertTrue("match our id",
                   EndpointReferenceUtils.getAddress(refWithId).indexOf(id) != -1);
    }

    // getId must recover the id from PATH_INFO when multiplexing via the address.
    @Test
    public void testMultiplexGetIdForAddress() throws Exception {
        destination = setUpDestination();
        destination.setMultiplexWithAddress(true);
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        String pathInfo = EndpointReferenceUtils.getAddress(refWithId);
        Map<String, Object> context = new HashMap<>();
        assertNull("fails with no context", destination.getId(context));
        context.put(Message.PATH_INFO, pathInfo);
        String result = destination.getId(context);
        assertNotNull(result);
        assertEquals("match our id", result, id);
    }

    // getId must recover the id from the inbound WS-Addressing "To" EPR.
    @Test
    public void testMultiplexGetId() throws Exception {
        destination = setUpDestination();
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        Map<String, Object> context = new HashMap<>();
        assertNull("fails with no context", destination.getId(context));
        AddressingProperties maps = mock(AddressingProperties.class);
        when(maps.getToEndpointReference()).thenReturn(refWithId);
        context.put(JAXWSAConstants.ADDRESSING_PROPERTIES_INBOUND, maps);
        String result = destination.getId(context);
        assertNotNull(result);
        assertEquals("match our id", result, id);
    }

    /** Convenience overload: real bus, no stem matching. */
    private NettyHttpDestination setUpDestination() throws Exception {
        return setUpDestination(false, false);
    };

    /**
     * Builds the destination under test with a mock engine and a conduit-initiator
     * manager that always hands back {@link #decoupledBackChannel}.
     *
     * @param contextMatchOnStem NOTE(review): currently unused in this method body — confirm
     *                           whether it was meant to configure stem matching.
     * @param mockedBus          when true, use a Mockito bus stubbed for the extensions the
     *                           destination looks up; otherwise a real ExtensionManagerBus.
     */
    private NettyHttpDestination setUpDestination(boolean contextMatchOnStem, boolean mockedBus)
        throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        transportFactory = new HTTPTransportFactory();
        // Conduit initiator that always returns the (possibly mocked) decoupled back channel.
        final ConduitInitiator ci = new ConduitInitiator() {
            public Conduit getConduit(EndpointInfo targetInfo, Bus b) throws IOException {
                return decoupledBackChannel;
            }
            public Conduit getConduit(EndpointInfo localInfo,
                                      EndpointReferenceType target,
                                      Bus b) throws IOException {
                return decoupledBackChannel;
            }
            public List<String> getTransportIds() {
                return null;
            }
            public Set<String> getUriPrefixes() {
                return new HashSet<>(Collections.singletonList("http"));
            }
        };
        // Manager that resolves every URI to the initiator above; other operations are no-ops.
        ConduitInitiatorManager mgr = new ConduitInitiatorManager() {
            public void deregisterConduitInitiator(String name) {
            }
            public ConduitInitiator getConduitInitiator(String name) throws BusException {
                return null;
            }
            public ConduitInitiator getConduitInitiatorForUri(String uri) {
                return ci;
            }
            public void registerConduitInitiator(String name, ConduitInitiator factory) {
            }
        };
        if (!mockedBus) {
            bus = new ExtensionManagerBus();
            bus.setExtension(mgr, ConduitInitiatorManager.class);
        } else {
            // Stub exactly the extensions the destination code path queries.
            bus = mock(Bus.class);
            when(bus.getExtension(EndpointResolverRegistry.class)).thenReturn(null);
            when(bus.getExtension(ContinuationProviderFactory.class)).thenReturn(null);
            when(bus.getExtension(PolicyDataEngine.class)).thenReturn(null);
            when(bus.hasExtensionByName("org.apache.cxf.ws.policy.PolicyEngine")).thenReturn(false);
            when(bus.getExtension(ClassLoader.class)).thenReturn(this.getClass().getClassLoader());
        }
        engine = mock(NettyHttpServerEngine.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        endpointInfo.setAddress(NOWHERE + "bar/foo");
        endpointInfo.addExtensor(policy);
        doNothing().when(engine).addServant(eq(new URL(NOWHERE + "bar/foo")),
                                            isA(NettyHttpHandler.class));
        NettyHttpDestination dest =
            new MockJettyHTTPDestination(bus, transportFactory.getRegistry(),
                                         endpointInfo, null, engine);
        dest.retrieveEngine();
        policy = dest.getServer();
        // Observer captures the dispatched message and the thread-default bus at dispatch time.
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                inMessage = m;
                threadDefaultBus = BusFactory.getThreadDefaultBus();
            }
        };
        dest.setMessageObserver(observer);
        return dest;
    }

    /** Stubs servant removal on the mock engine for testRemoveServant(). */
    private void setUpRemoveServant() throws Exception {
        doNothing().when(engine).removeServant(eq(new URL(NOWHERE + "bar/foo")));
    }

    private void setUpDoService(boolean setRedirectURL) throws Exception {
        setUpDoService(setRedirectURL, false);
    }

    private void setUpDoService(boolean setRedirectURL, boolean sendResponse) throws Exception {
        setUpDoService(setRedirectURL, sendResponse, false);
    }

    private void setUpDoService(boolean setRedirectURL,
                                boolean sendResponse,
                                int status) throws Exception {
        String method = "POST";
        String query = "?name";
        setUpDoService(setRedirectURL, sendResponse, false, method, query, status);
    }

    private void setUpDoService(boolean setRedirectURL,
                                boolean sendResponse,
                                boolean decoupled,
                                int status) throws Exception {
        String method = "POST";
        String query = "?name";
        setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, status);
    }

    private void setUpDoService(boolean setRedirectURL,
                                boolean sendResponse,
                                boolean decoupled) throws Exception {
        String method = "POST";
        String query = "?name";
        setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, 200);
    }

    /**
     * Builds the mocked servlet request/response pair that doService() will see.
     * Three paths: redirect (setRedirectURL), ?wsdl GET (delegates to verifyGetWSDLQuery),
     * and the normal POST/GET dispatch with headers, auth and optional response stubbing.
     */
    private void setUpDoService(boolean setRedirectURL,
                                boolean sendResponse,
                                boolean decoupled,
                                String method,
                                String query,
                                int status) throws Exception {
        is = mock(ServletInputStream.class);
        os = mock(ServletOutputStream.class);
        request = mock(HttpServletRequest.class);
        response = mock(HttpServletResponse.class);
        when(request.getMethod()).thenReturn(method);
        when(request.getUserPrincipal()).thenReturn(null);
        if (setRedirectURL) {
            policy.setRedirectURL(NOWHERE + "foo/bar");
            doNothing().when(response).sendRedirect(eq(NOWHERE + "foo/bar"));
            doNothing().when(response).flushBuffer();
        } else {
            //getQueryString for if statement
            when(request.getQueryString()).thenReturn(query);
            if ("GET".equals(method) && "?wsdl".equals(query)) {
                verifyGetWSDLQuery();
            } else { // test for the post
                when(request.getAttribute(AbstractHTTPDestination.CXF_CONTINUATION_MESSAGE))
                    .thenReturn(null);
                //when(request.getMethod()).andReturn(method);
                when(request.getInputStream()).thenReturn(is);
                when(request.getContextPath()).thenReturn("/bar");
                when(request.getServletPath()).thenReturn("");
                when(request.getPathInfo()).thenReturn("/foo");
                when(request.getRequestURI()).thenReturn("/foo");
                when(request.getRequestURL())
                    .thenReturn(new StringBuffer("http://localhost/foo"));
                doNothing().when(request)
                    .setAttribute("org.springframework.web.servlet.HandlerMapping.bestMatchingPattern",
                                  "/foo");
                when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
                when(request.getQueryString()).thenReturn(query);
                when(request.getHeader("Accept")).thenReturn("*/*");
                when(request.getContentType()).thenReturn("text/xml charset=utf8");
                when(request.getAttribute("org.eclipse.jetty.ajax.Continuation")).thenReturn(null);
                when(request.getAttribute("http.service.redirection")).thenReturn(null);
                // Header maps: content-type plus the basic-auth Authorization header.
                List<String> contentType = new ArrayList<>();
                contentType.add("text/xml");
                contentType.add("charset=utf8");
                List<String> authHeader = new ArrayList<>();
                authHeader.add(BASIC_AUTH);
                List<String> headerNames = new ArrayList<>();
                headerNames.add("content-type");
                headerNames.add(AUTH_HEADER);
                when(request.getHeaderNames()).thenReturn(Utils.enumeration(headerNames));
                when(request.getHeaders("content-type")).thenReturn(Utils.enumeration(contentType));
                when(request.getHeaders(NettyHttpDestinationTest.AUTH_HEADER))
                    .thenReturn(Utils.enumeration(authHeader));
                when(request.getInputStream()).thenReturn(is);
                doNothing().when(response).flushBuffer();
                if (sendResponse) {
                    // Stubs exercised when the test actually writes a response back.
                    doNothing().when(response).setStatus(status);
                    doNothing().when(response).setContentType("text/xml charset=utf8");
                    doNothing().when(response).addHeader(isA(String.class), isA(String.class));
                    doNothing().when(response).setContentLength(0);
                    when(response.getOutputStream()).thenReturn(os);
                    doNothing().when(response).flushBuffer();
                }
                // TLS attributes read by the destination to build TLSSessionInfo.
                when(request.getAttribute("jakarta.servlet.request.cipher_suite"))
                    .thenReturn("anythingwilldoreally");
                when(request.getAttribute("javax.net.ssl.session")).thenReturn(null);
                when(request.getAttribute("jakarta.servlet.request.X509Certificate")).thenReturn(null);
            }
        }
        if (decoupled) {
            setupDecoupledBackChannel();
        }
    }

    /** Replaces the decoupled back channel with a no-op mock conduit. */
    private void setupDecoupledBackChannel() throws IOException {
        decoupledBackChannel = mock(Conduit.class);
        doNothing().when(decoupledBackChannel).setMessageObserver(isA(MessageObserver.class));
        doNothing().when(decoupledBackChannel).prepare(isA(Message.class));
    }

    /** Gives the captured in-message a fresh exchange so a back channel can be obtained. */
    private void setUpInMessage() {
        inMessage.setExchange(new ExchangeImpl());
    }

    /** Builds an out-message copied from the in-message, with empty case-insensitive protocol headers. */
    private Message setUpOutMessage() {
        Message outMsg = new MessageImpl();
        outMsg.putAll(inMessage);
        outMsg.setExchange(new ExchangeImpl());
        outMsg.put(Message.PROTOCOL_HEADERS,
                   new TreeMap<String, List<String>>(String.CASE_INSENSITIVE_ORDER));
        return outMsg;
    }

    /** Puts the three WWW-Authenticate challenge values into the out-message's protocol headers. */
    private void setUpResponseHeaders(Message outMsg) {
        Map<String, List<String>> responseHeaders =
            CastUtils.cast((Map<?, ?>)outMsg.get(Message.PROTOCOL_HEADERS));
        assertNotNull("expected response headers", responseHeaders);
        List<String> challenges = new ArrayList<>();
        challenges.add(BASIC_CHALLENGE);
        challenges.add(DIGEST_CHALLENGE);
        challenges.add(CUSTOM_CHALLENGE);
        responseHeaders.put(CHALLENGE_HEADER, challenges);
    }

    /** Stubs the request/response for the ?wsdl GET path. */
    private void verifyGetWSDLQuery() throws Exception {
        when(request.getRequestURL()).thenReturn(new StringBuffer("http://localhost/bar/foo"));
        when(request.getPathInfo()).thenReturn("/bar/foo");
        when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
        when(request.getQueryString()).thenReturn("wsdl");
        doNothing().when(response).setContentType("text/xml");
        when(response.getOutputStream()).thenReturn(os);
    }

    /** Asserts the dispatched in-message carries the request/response, method, path, query and TLS info. */
    private void verifyDoService() throws Exception {
        assertSame("Default thread bus has not been set for request",
                   bus, threadDefaultBus);
        assertNotNull("unexpected null message", inMessage);
        assertSame("unexpected HTTP request",
                   inMessage.get(AbstractHTTPDestination.HTTP_REQUEST), request);
        assertSame("unexpected HTTP response",
                   inMessage.get(AbstractHTTPDestination.HTTP_RESPONSE), response);
        assertEquals("unexpected method", inMessage.get(Message.HTTP_REQUEST_METHOD), "POST");
        assertEquals("unexpected path", inMessage.get(Message.PATH_INFO), "/bar/foo");
        assertEquals("unexpected query", inMessage.get(Message.QUERY_STRING), "?name");
        assertNotNull("unexpected query", inMessage.get(TLSSessionInfo.class));
        verifyRequestHeaders();
    }

    /** Checks protocol headers and the AuthorizationPolicy extracted from the basic-auth header. */
    private void verifyRequestHeaders() throws Exception {
        Map<String, List<String>> requestHeaders =
            CastUtils.cast((Map<?, ?>)inMessage.get(Message.PROTOCOL_HEADERS));
        assertNotNull("expected request headers", requestHeaders);
        List<String> values = requestHeaders.get("content-type");
        assertNotNull("expected field", values);
        assertEquals("unexpected values", 2, values.size());
        assertTrue("expected value", values.contains("text/xml"));
        assertTrue("expected value", values.contains("charset=utf8"));
        values = requestHeaders.get(AUTH_HEADER);
        assertNotNull("expected field", values);
        assertEquals("unexpected values", 1, values.size());
        assertTrue("expected value", values.contains(BASIC_AUTH));
        AuthorizationPolicy authpolicy = inMessage.get(AuthorizationPolicy.class);
        // NOTE(review): this asserts the 'policy' field, but the local 'authpolicy' is what is
        // dereferenced below — it is probably what was meant to be null-checked here.
        assertNotNull("Expected some auth tokens", policy);
        assertEquals("expected user", USER, authpolicy.getUserName());
        assertEquals("expected passwd", PASSWD, authpolicy.getPassword());
    }

    /** Currently only checks the response headers exist; the challenge checks are disabled. */
    private void verifyResponseHeaders(Message outMsg) throws Exception {
        Map<String, List<String>> responseHeaders =
            CastUtils.cast((Map<?, ?>)outMsg.get(Message.PROTOCOL_HEADERS));
        assertNotNull("expected response headers", responseHeaders);
        //REVISIT CHALLENGE_HEADER's mean
        /*assertEquals("expected addField", 3, response.getAddFieldCallCount());
        Enumeration e = response.getFieldValues(CHALLENGE_HEADER);
        List<String> challenges = new ArrayList<>();
        while (e.hasMoreElements()) {
            challenges.add((String)e.nextElement());
        }
        assertTrue("expected challenge", challenges.contains(BASIC_CHALLENGE));
        assertTrue("expected challenge", challenges.contains(DIGEST_CHALLENGE));
        assertTrue("expected challenge", challenges.contains(CUSTOM_CHALLENGE));*/
    }

    private void verifyBackChannelSend(Conduit backChannel,
                                       Message outMsg,
                                       int status) throws Exception {
        verifyBackChannelSend(backChannel, outMsg, status, false);
    }

    /**
     * Writes the payload through the back channel's output stream and asserts the
     * conduit/stream types, response code, headers and (for two-way) the HTTP response presence.
     */
    private void verifyBackChannelSend(Conduit backChannel,
                                       Message outMsg,
                                       int status,
                                       boolean oneway) throws Exception {
        outMsg.getExchange().setOneWay(oneway);
        assertTrue("unexpected back channel type",
                   backChannel instanceof NettyHttpDestination.BackChannelConduit);
        assertTrue("unexpected content formats",
                   outMsg.getContentFormats().contains(OutputStream.class));
        OutputStream responseOS = outMsg.getContent(OutputStream.class);
        assertNotNull("expected output stream", responseOS);
        assertTrue("unexpected output stream type",
                   responseOS instanceof AbstractWrappedOutputStream);
        outMsg.put(Message.RESPONSE_CODE, status);
        responseOS.write(PAYLOAD.getBytes());
        setUpResponseHeaders(outMsg);
        responseOS.flush();
        // we don't need to check the status here
        /*if (status == 500) {
            assertEquals("unexpected status message", "Internal Server Error",
                         response.getReason());
        }*/
        verifyResponseHeaders(outMsg);
        if (oneway) {
            assertNull("unexpected HTTP response",
                       outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
        } else {
            assertNotNull("expected HTTP response",
                          outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
            responseOS.close();
        }
    }

    /** Builds an EPR rooted at the NOWHERE base address. */
    static EndpointReferenceType getEPR(String s) {
        return EndpointReferenceUtils.getEndpointReference(NOWHERE + s);
    }

    /** Exposes the protected retrieveFromContinuation(..) for testContinuationsIgnored(). */
    private static class TestJettyDestination extends NettyHttpDestination {
        TestJettyDestination(Bus bus, DestinationRegistry registry,
                             EndpointInfo endpointInfo,
                             NettyHttpServerEngineFactory serverEngineFactory) throws IOException {
            super(bus, registry, endpointInfo, serverEngineFactory);
        }

        @Override
        public Message retrieveFromContinuation(HttpServletRequest request) {
            return super.retrieveFromContinuation(request);
        }
    }
}
apache/paimon
35,826
paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.paimon.spark; import org.apache.paimon.CoreOptions; import org.apache.paimon.catalog.Catalog; import org.apache.paimon.catalog.CatalogContext; import org.apache.paimon.catalog.CatalogFactory; import org.apache.paimon.catalog.DelegateCatalog; import org.apache.paimon.catalog.PropertyChange; import org.apache.paimon.format.csv.CsvOptions; import org.apache.paimon.function.Function; import org.apache.paimon.function.FunctionDefinition; import org.apache.paimon.options.Options; import org.apache.paimon.rest.RESTCatalog; import org.apache.paimon.schema.Schema; import org.apache.paimon.schema.SchemaChange; import org.apache.paimon.spark.catalog.FormatTableCatalog; import org.apache.paimon.spark.catalog.SparkBaseCatalog; import org.apache.paimon.spark.catalog.SupportV1Function; import org.apache.paimon.spark.catalog.SupportView; import org.apache.paimon.spark.catalog.functions.PaimonFunctions; import org.apache.paimon.spark.catalog.functions.V1FunctionConverter; import org.apache.paimon.spark.utils.CatalogUtils; import org.apache.paimon.table.FormatTable; import org.apache.paimon.types.BlobType; import org.apache.paimon.types.DataField; import org.apache.paimon.types.DataType; import 
org.apache.paimon.utils.ExceptionUtils; import org.apache.paimon.utils.TypeUtils; import org.apache.spark.sql.PaimonSparkSession$; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.FunctionIdentifier; import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException; import org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException; import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException; import org.apache.spark.sql.catalyst.catalog.CatalogFunction; import org.apache.spark.sql.catalyst.catalog.PaimonV1FunctionRegistry; import org.apache.spark.sql.catalyst.expressions.Expression; import org.apache.spark.sql.catalyst.parser.extensions.UnResolvedPaimonV1Function; import org.apache.spark.sql.connector.catalog.FunctionCatalog; import org.apache.spark.sql.connector.catalog.Identifier; import org.apache.spark.sql.connector.catalog.NamespaceChange; import org.apache.spark.sql.connector.catalog.SupportsNamespaces; import org.apache.spark.sql.connector.catalog.Table; import org.apache.spark.sql.connector.catalog.TableCatalog; import org.apache.spark.sql.connector.catalog.TableChange; import org.apache.spark.sql.connector.catalog.functions.UnboundFunction; import org.apache.spark.sql.connector.expressions.FieldReference; import org.apache.spark.sql.connector.expressions.IdentityTransform; import org.apache.spark.sql.connector.expressions.NamedReference; import org.apache.spark.sql.connector.expressions.Transform; import org.apache.spark.sql.execution.PaimonFormatTable; import org.apache.spark.sql.execution.PartitionedCSVTable; import org.apache.spark.sql.execution.PartitionedJsonTable; import org.apache.spark.sql.execution.PartitionedOrcTable; import org.apache.spark.sql.execution.PartitionedParquetTable; import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat; 
import org.apache.spark.sql.execution.datasources.json.JsonFileFormat; import org.apache.spark.sql.execution.datasources.orc.OrcFileFormat; import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat; import org.apache.spark.sql.execution.datasources.v2.FileTable; import org.apache.spark.sql.types.StructField; import org.apache.spark.sql.types.StructType; import org.apache.spark.sql.util.CaseInsensitiveStringMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; import static org.apache.paimon.CoreOptions.FILE_FORMAT; import static org.apache.paimon.CoreOptions.TYPE; import static org.apache.paimon.TableType.FORMAT_TABLE; import static org.apache.paimon.spark.SparkCatalogOptions.DEFAULT_DATABASE; import static org.apache.paimon.spark.SparkCatalogOptions.V1FUNCTION_ENABLED; import static org.apache.paimon.spark.SparkTypeUtils.CURRENT_DEFAULT_COLUMN_METADATA_KEY; import static org.apache.paimon.spark.SparkTypeUtils.toPaimonType; import static org.apache.paimon.spark.util.OptionUtils.checkRequiredConfigurations; import static org.apache.paimon.spark.util.OptionUtils.copyWithSQLConf; import static org.apache.paimon.spark.utils.CatalogUtils.checkNamespace; import static org.apache.paimon.spark.utils.CatalogUtils.checkNoDefaultValue; import static org.apache.paimon.spark.utils.CatalogUtils.isUpdateColumnDefaultValue; import static org.apache.paimon.spark.utils.CatalogUtils.removeCatalogName; import static org.apache.paimon.spark.utils.CatalogUtils.toIdentifier; import static org.apache.paimon.spark.utils.CatalogUtils.toUpdateColumnDefaultValue; import static org.apache.paimon.utils.Preconditions.checkArgument; /** Spark {@link TableCatalog} for paimon. 
*/ public class SparkCatalog extends SparkBaseCatalog implements SupportView, SupportV1Function, FunctionCatalog, SupportsNamespaces, FormatTableCatalog { private static final Logger LOG = LoggerFactory.getLogger(SparkCatalog.class); public static final String FUNCTION_DEFINITION_NAME = "spark"; private static final String PRIMARY_KEY_IDENTIFIER = "primary-key"; private Catalog catalog; private String defaultDatabase; private boolean v1FunctionEnabled; @Nullable private PaimonV1FunctionRegistry v1FunctionRegistry; @Override public void initialize(String name, CaseInsensitiveStringMap options) { checkRequiredConfigurations(); SparkSession sparkSession = PaimonSparkSession$.MODULE$.active(); this.catalogName = name; CatalogContext catalogContext = CatalogContext.create( Options.fromMap(options), sparkSession.sessionState().newHadoopConf()); this.catalog = CatalogFactory.createCatalog(catalogContext); this.defaultDatabase = options.getOrDefault(DEFAULT_DATABASE.key(), DEFAULT_DATABASE.defaultValue()); this.v1FunctionEnabled = options.getBoolean(V1FUNCTION_ENABLED.key(), V1FUNCTION_ENABLED.defaultValue()) && DelegateCatalog.rootCatalog(catalog) instanceof RESTCatalog; if (v1FunctionEnabled) { this.v1FunctionRegistry = new PaimonV1FunctionRegistry(sparkSession); } try { catalog.getDatabase(defaultDatabase); } catch (Catalog.DatabaseNotExistException e) { LOG.info( "Default database '{}' does not exist, caused by: {}, start to create it", defaultDatabase, ExceptionUtils.stringifyException(e)); try { createNamespace(defaultNamespace(), new HashMap<>()); } catch (NamespaceAlreadyExistsException ignored) { } } } @Override public Catalog paimonCatalog() { return catalog; } // ======================= database methods =============================== @Override public String[] defaultNamespace() { return new String[] {defaultDatabase}; } @Override public void createNamespace(String[] namespace, Map<String, String> metadata) throws NamespaceAlreadyExistsException { 
checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); catalog.createDatabase(databaseName, false, metadata); } catch (Catalog.DatabaseAlreadyExistException e) { throw new NamespaceAlreadyExistsException(namespace); } } @Override public String[][] listNamespaces() { List<String> databases = catalog.listDatabases(); String[][] namespaces = new String[databases.size()][]; for (int i = 0; i < databases.size(); i++) { namespaces[i] = new String[] {databases.get(i)}; } return namespaces; } @Override public String[][] listNamespaces(String[] namespace) throws NoSuchNamespaceException { if (namespace.length == 0) { return listNamespaces(); } checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); catalog.getDatabase(databaseName); return new String[0][]; } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } } @Override public Map<String, String> loadNamespaceMetadata(String[] namespace) throws NoSuchNamespaceException { checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); return catalog.getDatabase(databaseName).options(); } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } } /** * Drop a namespace from the catalog, recursively dropping all objects within the namespace. * This interface implementation only supports the Spark 3.0, 3.1 and 3.2. * * <p>If the catalog implementation does not support this operation, it may throw {@link * UnsupportedOperationException}. 
* * @param namespace a multi-part namespace * @return true if the namespace was dropped * @throws UnsupportedOperationException If drop is not a supported operation */ public boolean dropNamespace(String[] namespace) throws NoSuchNamespaceException { return dropNamespace(namespace, false); } /** * Drop a namespace from the catalog with cascade mode, recursively dropping all objects within * the namespace if cascade is true. This interface implementation supports the Spark 3.3+. * * <p>If the catalog implementation does not support this operation, it may throw {@link * UnsupportedOperationException}. * * @param namespace a multi-part namespace * @param cascade When true, deletes all objects under the namespace * @return true if the namespace was dropped * @throws UnsupportedOperationException If drop is not a supported operation */ public boolean dropNamespace(String[] namespace, boolean cascade) throws NoSuchNamespaceException { checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); catalog.dropDatabase(databaseName, false, cascade); return true; } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } catch (Catalog.DatabaseNotEmptyException e) { throw new UnsupportedOperationException( String.format("Namespace %s is not empty", Arrays.toString(namespace))); } } @Override public void alterNamespace(String[] namespace, NamespaceChange... 
changes) throws NoSuchNamespaceException { checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); List<PropertyChange> propertyChanges = Arrays.stream(changes).map(this::toPropertyChange).collect(Collectors.toList()); catalog.alterDatabase(databaseName, propertyChanges, false); } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } } // ======================= table methods =============================== @Override public Identifier[] listTables(String[] namespace) throws NoSuchNamespaceException { checkNamespace(namespace); try { String databaseName = getDatabaseNameFromNamespace(namespace); return catalog.listTables(databaseName).stream() .map(table -> Identifier.of(namespace, table)) .toArray(Identifier[]::new); } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } } @Override public void invalidateTable(Identifier ident) { catalog.invalidateTable(toIdentifier(ident)); } @Override public org.apache.spark.sql.connector.catalog.Table loadTable(Identifier ident) throws NoSuchTableException { return loadSparkTable(ident, Collections.emptyMap()); } /** * Do not annotate with <code>@override</code> here to maintain compatibility with Spark 3.2-. */ public SparkTable loadTable(Identifier ident, String version) throws NoSuchTableException { LOG.info("Time travel to version '{}'.", version); org.apache.spark.sql.connector.catalog.Table table = loadSparkTable( ident, Collections.singletonMap(CoreOptions.SCAN_VERSION.key(), version)); if (table instanceof SparkTable) { return (SparkTable) table; } else { throw new NoSuchTableException(ident); } } /** * Do not annotate with <code>@override</code> here to maintain compatibility with Spark 3.2-. * * <p>NOTE: Time unit of timestamp here is microsecond (see {@link * TableCatalog#loadTable(Identifier, long)}). But in SQL you should use seconds. 
*/ public SparkTable loadTable(Identifier ident, long timestamp) throws NoSuchTableException { // Paimon's timestamp use millisecond timestamp = timestamp / 1000; LOG.info("Time travel target timestamp is {} milliseconds.", timestamp); org.apache.spark.sql.connector.catalog.Table table = loadSparkTable( ident, Collections.singletonMap( CoreOptions.SCAN_TIMESTAMP_MILLIS.key(), String.valueOf(timestamp))); if (table instanceof SparkTable) { return (SparkTable) table; } else { throw new NoSuchTableException(ident); } } @Override public org.apache.spark.sql.connector.catalog.Table alterTable( Identifier ident, TableChange... changes) throws NoSuchTableException { List<SchemaChange> schemaChanges = Arrays.stream(changes).map(this::toSchemaChange).collect(Collectors.toList()); try { catalog.alterTable(toIdentifier(ident), schemaChanges, false); return loadTable(ident); } catch (Catalog.TableNotExistException e) { throw new NoSuchTableException(ident); } catch (Catalog.ColumnAlreadyExistException | Catalog.ColumnNotExistException e) { throw new RuntimeException(e); } } @Override public org.apache.spark.sql.connector.catalog.Table createTable( Identifier ident, StructType schema, Transform[] partitions, Map<String, String> properties) throws TableAlreadyExistsException, NoSuchNamespaceException { try { catalog.createTable( toIdentifier(ident), toInitialSchema(schema, partitions, properties), false); return loadTable(ident); } catch (Catalog.TableAlreadyExistException e) { throw new TableAlreadyExistsException(ident); } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(ident.namespace()); } catch (NoSuchTableException e) { throw new RuntimeException(e); } } @Override public boolean dropTable(Identifier ident) { try { catalog.dropTable(toIdentifier(ident), false); return true; } catch (Catalog.TableNotExistException e) { return false; } } private SchemaChange toSchemaChange(TableChange change) { if (change instanceof TableChange.SetProperty) { 
TableChange.SetProperty set = (TableChange.SetProperty) change; validateAlterProperty(set.property()); if (set.property().equals(TableCatalog.PROP_COMMENT)) { return SchemaChange.updateComment(set.value()); } else { return SchemaChange.setOption(set.property(), set.value()); } } else if (change instanceof TableChange.RemoveProperty) { TableChange.RemoveProperty remove = (TableChange.RemoveProperty) change; validateAlterProperty(remove.property()); if (remove.property().equals(TableCatalog.PROP_COMMENT)) { return SchemaChange.updateComment(null); } else { return SchemaChange.removeOption(remove.property()); } } else if (change instanceof TableChange.AddColumn) { TableChange.AddColumn add = (TableChange.AddColumn) change; SchemaChange.Move move = getMove(add.position(), add.fieldNames()); checkNoDefaultValue(add); return SchemaChange.addColumn( add.fieldNames(), toPaimonType(add.dataType()).copy(add.isNullable()), add.comment(), move); } else if (change instanceof TableChange.RenameColumn) { TableChange.RenameColumn rename = (TableChange.RenameColumn) change; return SchemaChange.renameColumn(rename.fieldNames(), rename.newName()); } else if (change instanceof TableChange.DeleteColumn) { TableChange.DeleteColumn delete = (TableChange.DeleteColumn) change; return SchemaChange.dropColumn(delete.fieldNames()); } else if (change instanceof TableChange.UpdateColumnType) { TableChange.UpdateColumnType update = (TableChange.UpdateColumnType) change; return SchemaChange.updateColumnType( update.fieldNames(), toPaimonType(update.newDataType()), true); } else if (change instanceof TableChange.UpdateColumnNullability) { TableChange.UpdateColumnNullability update = (TableChange.UpdateColumnNullability) change; return SchemaChange.updateColumnNullability(update.fieldNames(), update.nullable()); } else if (change instanceof TableChange.UpdateColumnComment) { TableChange.UpdateColumnComment update = (TableChange.UpdateColumnComment) change; return 
SchemaChange.updateColumnComment(update.fieldNames(), update.newComment()); } else if (change instanceof TableChange.UpdateColumnPosition) { TableChange.UpdateColumnPosition update = (TableChange.UpdateColumnPosition) change; SchemaChange.Move move = getMove(update.position(), update.fieldNames()); return SchemaChange.updateColumnPosition(move); } else if (isUpdateColumnDefaultValue(change)) { return toUpdateColumnDefaultValue(change); } else { throw new UnsupportedOperationException( "Change is not supported: " + change.getClass()); } } private static SchemaChange.Move getMove( TableChange.ColumnPosition columnPosition, String[] fieldNames) { SchemaChange.Move move = null; if (columnPosition instanceof TableChange.First) { move = SchemaChange.Move.first(fieldNames[0]); } else if (columnPosition instanceof TableChange.After) { move = SchemaChange.Move.after( fieldNames[0], ((TableChange.After) columnPosition).column()); } return move; } private Schema toInitialSchema( StructType schema, Transform[] partitions, Map<String, String> properties) { Map<String, String> normalizedProperties = new HashMap<>(properties); String blobFieldName = properties.get(CoreOptions.BLOB_FIELD.key()); String provider = properties.get(TableCatalog.PROP_PROVIDER); if (!usePaimon(provider)) { if (isFormatTable(provider)) { normalizedProperties.put(TYPE.key(), FORMAT_TABLE.toString()); normalizedProperties.put(FILE_FORMAT.key(), provider.toLowerCase()); } else { throw new UnsupportedOperationException("Provider is not supported: " + provider); } } normalizedProperties.remove(TableCatalog.PROP_PROVIDER); normalizedProperties.remove(PRIMARY_KEY_IDENTIFIER); normalizedProperties.remove(TableCatalog.PROP_COMMENT); if (normalizedProperties.containsKey(TableCatalog.PROP_LOCATION)) { String path = normalizedProperties.remove(TableCatalog.PROP_LOCATION); normalizedProperties.put(CoreOptions.PATH.key(), path); } String pkAsString = properties.get(PRIMARY_KEY_IDENTIFIER); List<String> primaryKeys = 
pkAsString == null ? Collections.emptyList() : Arrays.stream(pkAsString.split(",")) .map(String::trim) .collect(Collectors.toList()); Schema.Builder schemaBuilder = Schema.newBuilder() .options(normalizedProperties) .primaryKey(primaryKeys) .partitionKeys(convertPartitionTransforms(partitions)) .comment(properties.getOrDefault(TableCatalog.PROP_COMMENT, null)); for (StructField field : schema.fields()) { String name = field.name(); DataType type; if (Objects.equals(blobFieldName, name)) { checkArgument( field.dataType() instanceof org.apache.spark.sql.types.BinaryType, "The type of blob field must be binary"); type = new BlobType(); } else { type = toPaimonType(field.dataType()).copy(field.nullable()); } String comment = field.getComment().getOrElse(() -> null); if (field.metadata().contains(CURRENT_DEFAULT_COLUMN_METADATA_KEY)) { String defaultValue = field.metadata().getString(CURRENT_DEFAULT_COLUMN_METADATA_KEY); schemaBuilder.column(name, type, comment, defaultValue); } else { schemaBuilder.column(name, type, comment); } } return schemaBuilder.build(); } private void validateAlterProperty(String alterKey) { if (PRIMARY_KEY_IDENTIFIER.equals(alterKey)) { throw new UnsupportedOperationException("Alter primary key is not supported"); } } @Override public void renameTable(Identifier oldIdent, Identifier newIdent) throws NoSuchTableException, TableAlreadyExistsException { try { catalog.renameTable( toIdentifier(oldIdent), toIdentifier(removeCatalogName(newIdent, catalogName)), false); } catch (Catalog.TableNotExistException e) { throw new NoSuchTableException(oldIdent); } catch (Catalog.TableAlreadyExistException e) { throw new TableAlreadyExistsException(newIdent); } } // ======================= Function methods =============================== @Override public Identifier[] listFunctions(String[] namespace) throws NoSuchNamespaceException { if (isSystemFunctionNamespace(namespace)) { List<Identifier> result = new ArrayList<>(); PaimonFunctions.names().forEach(name 
-> result.add(Identifier.of(namespace, name))); return result.toArray(new Identifier[0]); } else if (isDatabaseFunctionNamespace(namespace)) { List<Identifier> result = new ArrayList<>(); String databaseName = getDatabaseNameFromNamespace(namespace); try { catalog.listFunctions(databaseName) .forEach(name -> result.add(Identifier.of(namespace, name))); } catch (Catalog.DatabaseNotExistException e) { throw new NoSuchNamespaceException(namespace); } return result.toArray(new Identifier[0]); } throw new NoSuchNamespaceException(namespace); } @Override public UnboundFunction loadFunction(Identifier ident) throws NoSuchFunctionException { String[] namespace = ident.namespace(); if (isSystemFunctionNamespace(namespace)) { UnboundFunction func = PaimonFunctions.load(ident.name()); if (func != null) { return func; } } else if (isDatabaseFunctionNamespace(namespace)) { try { Function paimonFunction = catalog.getFunction(toIdentifier(ident)); FunctionDefinition functionDefinition = paimonFunction.definition(FUNCTION_DEFINITION_NAME); if (functionDefinition instanceof FunctionDefinition.LambdaFunctionDefinition) { FunctionDefinition.LambdaFunctionDefinition lambdaFunctionDefinition = (FunctionDefinition.LambdaFunctionDefinition) functionDefinition; if (paimonFunction.returnParams().isPresent()) { List<DataField> dataFields = paimonFunction.returnParams().get(); if (dataFields.size() == 1) { DataField dataField = dataFields.get(0); return new LambdaScalarFunction( ident.name(), CatalogUtils.paimonType2SparkType(dataField.type()), CatalogUtils.paimonType2JavaType(dataField.type()), lambdaFunctionDefinition.definition()); } else { throw new UnsupportedOperationException( "outParams size > 1 is not supported"); } } } } catch (Catalog.FunctionNotExistException e) { throw new NoSuchFunctionException(ident); } } throw new NoSuchFunctionException(ident); } private boolean isSystemFunctionNamespace(String[] namespace) { // Allow for empty namespace, as Spark's bucket join will use 
`bucket` function with empty // namespace to generate transforms for partitioning. return namespace.length == 0 || isSystemNamespace(namespace); } private boolean isDatabaseFunctionNamespace(String[] namespace) { return namespace.length == 1 && namespaceExists(namespace); } private PaimonV1FunctionRegistry v1FunctionRegistry() { assert v1FunctionRegistry != null; return v1FunctionRegistry; } @Override public boolean v1FunctionEnabled() { return v1FunctionEnabled; } @Override public Function getFunction(FunctionIdentifier funcIdent) throws Exception { return paimonCatalog().getFunction(V1FunctionConverter.fromFunctionIdentifier(funcIdent)); } @Override public void createV1Function(CatalogFunction v1Function, boolean ignoreIfExists) throws Exception { Function paimonFunction = V1FunctionConverter.fromV1Function(v1Function); paimonCatalog() .createFunction( V1FunctionConverter.fromFunctionIdentifier(v1Function.identifier()), paimonFunction, ignoreIfExists); } @Override public boolean v1FunctionRegistered(FunctionIdentifier funcIdent) { return v1FunctionRegistry().isRegistered(funcIdent); } @Override public Expression registerAndResolveV1Function( UnResolvedPaimonV1Function unresolvedV1Function) { return v1FunctionRegistry().registerAndResolveFunction(unresolvedV1Function); } @Override public void dropV1Function(FunctionIdentifier funcIdent, boolean ifExists) throws Exception { v1FunctionRegistry().unregisterFunction(funcIdent); paimonCatalog() .dropFunction(V1FunctionConverter.fromFunctionIdentifier(funcIdent), ifExists); } // ======================= Tools methods =============================== protected org.apache.spark.sql.connector.catalog.Table loadSparkTable( Identifier ident, Map<String, String> extraOptions) throws NoSuchTableException { try { org.apache.paimon.table.Table paimonTable = catalog.getTable(toIdentifier(ident)); if (paimonTable instanceof FormatTable) { return toSparkFormatTable(ident, (FormatTable) paimonTable); } else { return new SparkTable( 
copyWithSQLConf( paimonTable, catalogName, toIdentifier(ident), extraOptions)); } } catch (Catalog.TableNotExistException e) { throw new NoSuchTableException(ident); } } private static Table toSparkFormatTable(Identifier ident, FormatTable formatTable) { SparkSession spark = PaimonSparkSession$.MODULE$.active(); StructType schema = SparkTypeUtils.fromPaimonRowType(formatTable.rowType()); StructType partitionSchema = SparkTypeUtils.fromPaimonRowType( TypeUtils.project(formatTable.rowType(), formatTable.partitionKeys())); List<String> pathList = new ArrayList<>(); pathList.add(formatTable.location()); Map<String, String> optionsMap = formatTable.options(); CoreOptions coreOptions = new CoreOptions(optionsMap); if (coreOptions.formatTableImplementationIsPaimon()) { return new PaimonFormatTable( spark, new CaseInsensitiveStringMap(optionsMap), scala.collection.JavaConverters.asScalaBuffer(pathList).toSeq(), schema, partitionSchema, formatTable, ident.name()); } Options options = Options.fromMap(formatTable.options()); return convertToFileTable( formatTable, ident, pathList, options, spark, schema, partitionSchema); } private static FileTable convertToFileTable( FormatTable formatTable, Identifier ident, List<String> pathList, Options options, SparkSession spark, StructType schema, StructType partitionSchema) { CaseInsensitiveStringMap dsOptions = new CaseInsensitiveStringMap(options.toMap()); if (formatTable.format() == FormatTable.Format.CSV) { options.set("sep", options.get(CsvOptions.FIELD_DELIMITER)); dsOptions = new CaseInsensitiveStringMap(options.toMap()); return new PartitionedCSVTable( ident.name(), spark, dsOptions, scala.collection.JavaConverters.asScalaBuffer(pathList).toSeq(), scala.Option.apply(schema), CSVFileFormat.class, partitionSchema); } else if (formatTable.format() == FormatTable.Format.ORC) { return new PartitionedOrcTable( ident.name(), spark, dsOptions, scala.collection.JavaConverters.asScalaBuffer(pathList).toSeq(), scala.Option.apply(schema), 
OrcFileFormat.class,
            partitionSchema);
} else if (formatTable.format() == FormatTable.Format.PARQUET) {
    return new PartitionedParquetTable(
            ident.name(),
            spark,
            dsOptions,
            scala.collection.JavaConverters.asScalaBuffer(pathList).toSeq(),
            scala.Option.apply(schema),
            ParquetFileFormat.class,
            partitionSchema);
} else if (formatTable.format() == FormatTable.Format.JSON) {
    return new PartitionedJsonTable(
            ident.name(),
            spark,
            dsOptions,
            scala.collection.JavaConverters.asScalaBuffer(pathList).toSeq(),
            scala.Option.apply(schema),
            JsonFileFormat.class,
            partitionSchema);
} else {
    throw new UnsupportedOperationException(
            "Unsupported format table "
                    + ident.name()
                    + " format "
                    + formatTable.format().name());
}
}

/**
 * Converts Spark partition transforms into a list of partition column names.
 *
 * <p>Only identity transforms over a single top-level column are supported; anything
 * else (bucket, truncate, nested field references, ...) is rejected.
 *
 * @param transforms the Spark partition transforms declared on the table
 * @return the partition column names, in declaration order
 * @throws UnsupportedOperationException if a transform is not a single-column identity
 */
protected List<String> convertPartitionTransforms(Transform[] transforms) {
    List<String> partitionColNames = new ArrayList<>(transforms.length);
    for (Transform transform : transforms) {
        if (!(transform instanceof IdentityTransform)) {
            throw new UnsupportedOperationException(
                    "Unsupported partition transform: " + transform);
        }
        NamedReference ref = ((IdentityTransform) transform).ref();
        // BUG FIX: the original condition `!(ref instanceof FieldReference
        // || ref.fieldNames().length != 1)` accepted multi-part (nested) field
        // references and then silently used only the first name part, while it
        // rejected single-name references of other NamedReference subtypes.
        // Reject anything that is not a single-part FieldReference instead.
        if (!(ref instanceof FieldReference) || ref.fieldNames().length != 1) {
            throw new UnsupportedOperationException(
                    "Unsupported partition transform: " + transform);
        }
        partitionColNames.add(ref.fieldNames()[0]);
    }
    return partitionColNames;
}

/** Maps a Spark namespace change to the corresponding Paimon property change. */
private PropertyChange toPropertyChange(NamespaceChange change) {
    if (change instanceof NamespaceChange.SetProperty) {
        NamespaceChange.SetProperty set = (NamespaceChange.SetProperty) change;
        return PropertyChange.setProperty(set.property(), set.value());
    } else if (change instanceof NamespaceChange.RemoveProperty) {
        NamespaceChange.RemoveProperty remove = (NamespaceChange.RemoveProperty) change;
        return PropertyChange.removeProperty(remove.property());
    } else {
        throw new UnsupportedOperationException(
                "Change is not supported: " + change.getClass());
    }
}

/** A namespace here is a single-element array holding the database name. */
private String getDatabaseNameFromNamespace(String[] namespace) {
    return namespace[0];
}
}
googleapis/google-cloud-java
35,373
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/CharsToIgnore.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * Characters to skip when doing deidentification of a value. These will be left * alone and skipped. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.CharsToIgnore} */ public final class CharsToIgnore extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.CharsToIgnore) CharsToIgnoreOrBuilder { private static final long serialVersionUID = 0L; // Use CharsToIgnore.newBuilder() to construct. 
private CharsToIgnore(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CharsToIgnore() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CharsToIgnore(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_CharsToIgnore_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_CharsToIgnore_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.CharsToIgnore.class, com.google.privacy.dlp.v2.CharsToIgnore.Builder.class); } /** * * * <pre> * Convenience enum for indicating common characters to not transform. * </pre> * * Protobuf enum {@code google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore} */ public enum CommonCharsToIgnore implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unused. * </pre> * * <code>COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0;</code> */ COMMON_CHARS_TO_IGNORE_UNSPECIFIED(0), /** * * * <pre> * 0-9 * </pre> * * <code>NUMERIC = 1;</code> */ NUMERIC(1), /** * * * <pre> * A-Z * </pre> * * <code>ALPHA_UPPER_CASE = 2;</code> */ ALPHA_UPPER_CASE(2), /** * * * <pre> * a-z * </pre> * * <code>ALPHA_LOWER_CASE = 3;</code> */ ALPHA_LOWER_CASE(3), /** * * * <pre> * US Punctuation, one of !"#$%&amp;'()*+,-./:;&lt;=&gt;?&#64;[&#92;]^_`{|}~ * </pre> * * <code>PUNCTUATION = 4;</code> */ PUNCTUATION(4), /** * * * <pre> * Whitespace character, one of [ &#92;t&#92;n&#92;x0B&#92;f&#92;r] * </pre> * * <code>WHITESPACE = 5;</code> */ WHITESPACE(5), UNRECOGNIZED(-1), ; /** * * * <pre> * Unused. 
* </pre> * * <code>COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0;</code> */ public static final int COMMON_CHARS_TO_IGNORE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * 0-9 * </pre> * * <code>NUMERIC = 1;</code> */ public static final int NUMERIC_VALUE = 1; /** * * * <pre> * A-Z * </pre> * * <code>ALPHA_UPPER_CASE = 2;</code> */ public static final int ALPHA_UPPER_CASE_VALUE = 2; /** * * * <pre> * a-z * </pre> * * <code>ALPHA_LOWER_CASE = 3;</code> */ public static final int ALPHA_LOWER_CASE_VALUE = 3; /** * * * <pre> * US Punctuation, one of !"#$%&amp;'()*+,-./:;&lt;=&gt;?&#64;[&#92;]^_`{|}~ * </pre> * * <code>PUNCTUATION = 4;</code> */ public static final int PUNCTUATION_VALUE = 4; /** * * * <pre> * Whitespace character, one of [ &#92;t&#92;n&#92;x0B&#92;f&#92;r] * </pre> * * <code>WHITESPACE = 5;</code> */ public static final int WHITESPACE_VALUE = 5; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static CommonCharsToIgnore valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static CommonCharsToIgnore forNumber(int value) { switch (value) { case 0: return COMMON_CHARS_TO_IGNORE_UNSPECIFIED; case 1: return NUMERIC; case 2: return ALPHA_UPPER_CASE; case 3: return ALPHA_LOWER_CASE; case 4: return PUNCTUATION; case 5: return WHITESPACE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<CommonCharsToIgnore> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<CommonCharsToIgnore> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<CommonCharsToIgnore>() { public CommonCharsToIgnore findValueByNumber(int number) { return CommonCharsToIgnore.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.privacy.dlp.v2.CharsToIgnore.getDescriptor().getEnumTypes().get(0); } private static final CommonCharsToIgnore[] VALUES = values(); public static CommonCharsToIgnore valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private CommonCharsToIgnore(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore) } private int charactersCase_ = 0; @SuppressWarnings("serial") private java.lang.Object characters_; public enum CharactersCase implements 
com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CHARACTERS_TO_SKIP(1), COMMON_CHARACTERS_TO_IGNORE(2), CHARACTERS_NOT_SET(0); private final int value; private CharactersCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static CharactersCase valueOf(int value) { return forNumber(value); } public static CharactersCase forNumber(int value) { switch (value) { case 1: return CHARACTERS_TO_SKIP; case 2: return COMMON_CHARACTERS_TO_IGNORE; case 0: return CHARACTERS_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public CharactersCase getCharactersCase() { return CharactersCase.forNumber(charactersCase_); } public static final int CHARACTERS_TO_SKIP_FIELD_NUMBER = 1; /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return Whether the charactersToSkip field is set. */ public boolean hasCharactersToSkip() { return charactersCase_ == 1; } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return The charactersToSkip. */ public java.lang.String getCharactersToSkip() { java.lang.Object ref = ""; if (charactersCase_ == 1) { ref = characters_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (charactersCase_ == 1) { characters_ = s; } return s; } } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return The bytes for charactersToSkip. 
*/ public com.google.protobuf.ByteString getCharactersToSkipBytes() { java.lang.Object ref = ""; if (charactersCase_ == 1) { ref = characters_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (charactersCase_ == 1) { characters_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int COMMON_CHARACTERS_TO_IGNORE_FIELD_NUMBER = 2; /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code>.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return Whether the commonCharactersToIgnore field is set. */ public boolean hasCommonCharactersToIgnore() { return charactersCase_ == 2; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code>.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return The enum numeric value on the wire for commonCharactersToIgnore. */ public int getCommonCharactersToIgnoreValue() { if (charactersCase_ == 2) { return (java.lang.Integer) characters_; } return 0; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code>.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return The commonCharactersToIgnore. */ public com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore getCommonCharactersToIgnore() { if (charactersCase_ == 2) { com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore result = com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore.forNumber( (java.lang.Integer) characters_); return result == null ? 
com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore.UNRECOGNIZED : result; } return com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore .COMMON_CHARS_TO_IGNORE_UNSPECIFIED; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (charactersCase_ == 1) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, characters_); } if (charactersCase_ == 2) { output.writeEnum(2, ((java.lang.Integer) characters_)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (charactersCase_ == 1) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, characters_); } if (charactersCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeEnumSize( 2, ((java.lang.Integer) characters_)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.CharsToIgnore)) { return super.equals(obj); } com.google.privacy.dlp.v2.CharsToIgnore other = (com.google.privacy.dlp.v2.CharsToIgnore) obj; if (!getCharactersCase().equals(other.getCharactersCase())) return false; switch (charactersCase_) { case 1: if (!getCharactersToSkip().equals(other.getCharactersToSkip())) return false; break; case 2: if (getCommonCharactersToIgnoreValue() != other.getCommonCharactersToIgnoreValue()) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (charactersCase_) { case 1: hash = (37 * hash) + CHARACTERS_TO_SKIP_FIELD_NUMBER; hash = (53 * hash) + getCharactersToSkip().hashCode(); break; case 2: hash = (37 * hash) + COMMON_CHARACTERS_TO_IGNORE_FIELD_NUMBER; hash = (53 * hash) + getCommonCharactersToIgnoreValue(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.CharsToIgnore parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.CharsToIgnore parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.CharsToIgnore parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.CharsToIgnore prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Characters to skip when doing deidentification of a value. These will be left * alone and skipped. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.CharsToIgnore} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.CharsToIgnore) com.google.privacy.dlp.v2.CharsToIgnoreOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_CharsToIgnore_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_CharsToIgnore_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.CharsToIgnore.class, com.google.privacy.dlp.v2.CharsToIgnore.Builder.class); } // Construct using com.google.privacy.dlp.v2.CharsToIgnore.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; charactersCase_ = 0; characters_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_CharsToIgnore_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.CharsToIgnore getDefaultInstanceForType() { return com.google.privacy.dlp.v2.CharsToIgnore.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.CharsToIgnore build() { 
com.google.privacy.dlp.v2.CharsToIgnore result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.CharsToIgnore buildPartial() { com.google.privacy.dlp.v2.CharsToIgnore result = new com.google.privacy.dlp.v2.CharsToIgnore(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.CharsToIgnore result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.privacy.dlp.v2.CharsToIgnore result) { result.charactersCase_ = charactersCase_; result.characters_ = this.characters_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.CharsToIgnore) { return mergeFrom((com.google.privacy.dlp.v2.CharsToIgnore) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.CharsToIgnore other) { if (other == 
com.google.privacy.dlp.v2.CharsToIgnore.getDefaultInstance()) return this; switch (other.getCharactersCase()) { case CHARACTERS_TO_SKIP: { charactersCase_ = 1; characters_ = other.characters_; onChanged(); break; } case COMMON_CHARACTERS_TO_IGNORE: { setCommonCharactersToIgnoreValue(other.getCommonCharactersToIgnoreValue()); break; } case CHARACTERS_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); charactersCase_ = 1; characters_ = s; break; } // case 10 case 16: { int rawValue = input.readEnum(); charactersCase_ = 2; characters_ = rawValue; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int charactersCase_ = 0; private java.lang.Object characters_; public CharactersCase getCharactersCase() { return CharactersCase.forNumber(charactersCase_); } public Builder clearCharacters() { charactersCase_ = 0; characters_ = null; onChanged(); return this; } private int bitField0_; /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return Whether the charactersToSkip field is set. 
*/ @java.lang.Override public boolean hasCharactersToSkip() { return charactersCase_ == 1; } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return The charactersToSkip. */ @java.lang.Override public java.lang.String getCharactersToSkip() { java.lang.Object ref = ""; if (charactersCase_ == 1) { ref = characters_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (charactersCase_ == 1) { characters_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return The bytes for charactersToSkip. */ @java.lang.Override public com.google.protobuf.ByteString getCharactersToSkipBytes() { java.lang.Object ref = ""; if (charactersCase_ == 1) { ref = characters_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (charactersCase_ == 1) { characters_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @param value The charactersToSkip to set. * @return This builder for chaining. */ public Builder setCharactersToSkip(java.lang.String value) { if (value == null) { throw new NullPointerException(); } charactersCase_ = 1; characters_ = value; onChanged(); return this; } /** * * * <pre> * Characters to not transform when masking. * </pre> * * <code>string characters_to_skip = 1;</code> * * @return This builder for chaining. */ public Builder clearCharactersToSkip() { if (charactersCase_ == 1) { charactersCase_ = 0; characters_ = null; onChanged(); } return this; } /** * * * <pre> * Characters to not transform when masking. 
* </pre> * * <code>string characters_to_skip = 1;</code> * * @param value The bytes for charactersToSkip to set. * @return This builder for chaining. */ public Builder setCharactersToSkipBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); charactersCase_ = 1; characters_ = value; onChanged(); return this; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return Whether the commonCharactersToIgnore field is set. */ @java.lang.Override public boolean hasCommonCharactersToIgnore() { return charactersCase_ == 2; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return The enum numeric value on the wire for commonCharactersToIgnore. */ @java.lang.Override public int getCommonCharactersToIgnoreValue() { if (charactersCase_ == 2) { return ((java.lang.Integer) characters_).intValue(); } return 0; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @param value The enum numeric value on the wire for commonCharactersToIgnore to set. * @return This builder for chaining. */ public Builder setCommonCharactersToIgnoreValue(int value) { charactersCase_ = 2; characters_ = value; onChanged(); return this; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. 
* </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return The commonCharactersToIgnore. */ @java.lang.Override public com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore getCommonCharactersToIgnore() { if (charactersCase_ == 2) { com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore result = com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore.forNumber( (java.lang.Integer) characters_); return result == null ? com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore.UNRECOGNIZED : result; } return com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore .COMMON_CHARS_TO_IGNORE_UNSPECIFIED; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @param value The commonCharactersToIgnore to set. * @return This builder for chaining. */ public Builder setCommonCharactersToIgnore( com.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore value) { if (value == null) { throw new NullPointerException(); } charactersCase_ = 2; characters_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Common characters to not transform when masking. Useful to avoid removing * punctuation. * </pre> * * <code> * .google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore common_characters_to_ignore = 2; * </code> * * @return This builder for chaining. 
*/ public Builder clearCommonCharactersToIgnore() { if (charactersCase_ == 2) { charactersCase_ = 0; characters_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.CharsToIgnore) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CharsToIgnore) private static final com.google.privacy.dlp.v2.CharsToIgnore DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.CharsToIgnore(); } public static com.google.privacy.dlp.v2.CharsToIgnore getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CharsToIgnore> PARSER = new com.google.protobuf.AbstractParser<CharsToIgnore>() { @java.lang.Override public CharsToIgnore parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CharsToIgnore> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CharsToIgnore> getParserForType() { return PARSER; } 
@java.lang.Override public com.google.privacy.dlp.v2.CharsToIgnore getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
35,669
java-iam/grpc-google-iam-v2/src/main/java/com/google/iam/v2/PoliciesGrpc.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.iam.v2;

import static io.grpc.MethodDescriptor.generateFullMethodName;

// GENERATED CODE -- produced by the gRPC proto compiler from
// google/iam/v2/policy.proto. Do not edit by hand; changes will be
// overwritten on the next regeneration.
/**
 *
 *
 * <pre>
 * An interface for managing Identity and Access Management (IAM) policies.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/iam/v2/policy.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class PoliciesGrpc {

  // Non-instantiable holder class: only static method descriptors and
  // stub factory methods live here.
  private PoliciesGrpc() {}

  /** Fully-qualified proto service name as it appears on the wire. */
  public static final java.lang.String SERVICE_NAME = "google.iam.v2.Policies";

  // Static method descriptors that strictly reflect the proto.
  // -----------------------------------------------------------------------
  // Method descriptors. Each one is created lazily on first use via
  // double-checked locking on a volatile field, so the marshaller setup is
  // performed at most once per process.
  // -----------------------------------------------------------------------

  private static volatile io.grpc.MethodDescriptor<
          com.google.iam.v2.ListPoliciesRequest, com.google.iam.v2.ListPoliciesResponse>
      getListPoliciesMethod;

  /** Descriptor for the unary ListPolicies RPC. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListPolicies",
      requestType = com.google.iam.v2.ListPoliciesRequest.class,
      responseType = com.google.iam.v2.ListPoliciesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.iam.v2.ListPoliciesRequest, com.google.iam.v2.ListPoliciesResponse>
      getListPoliciesMethod() {
    // Local copy avoids re-reading the volatile field after initialization.
    io.grpc.MethodDescriptor<
            com.google.iam.v2.ListPoliciesRequest, com.google.iam.v2.ListPoliciesResponse>
        getListPoliciesMethod;
    if ((getListPoliciesMethod = PoliciesGrpc.getListPoliciesMethod) == null) {
      synchronized (PoliciesGrpc.class) {
        if ((getListPoliciesMethod = PoliciesGrpc.getListPoliciesMethod) == null) {
          PoliciesGrpc.getListPoliciesMethod =
              getListPoliciesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.iam.v2.ListPoliciesRequest,
                          com.google.iam.v2.ListPoliciesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListPolicies"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.ListPoliciesRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.ListPoliciesResponse.getDefaultInstance()))
                      .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("ListPolicies"))
                      .build();
        }
      }
    }
    return getListPoliciesMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.iam.v2.GetPolicyRequest, com.google.iam.v2.Policy>
      getGetPolicyMethod;

  /** Descriptor for the unary GetPolicy RPC. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetPolicy",
      requestType = com.google.iam.v2.GetPolicyRequest.class,
      responseType = com.google.iam.v2.Policy.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.iam.v2.GetPolicyRequest, com.google.iam.v2.Policy>
      getGetPolicyMethod() {
    io.grpc.MethodDescriptor<com.google.iam.v2.GetPolicyRequest, com.google.iam.v2.Policy>
        getGetPolicyMethod;
    if ((getGetPolicyMethod = PoliciesGrpc.getGetPolicyMethod) == null) {
      synchronized (PoliciesGrpc.class) {
        if ((getGetPolicyMethod = PoliciesGrpc.getGetPolicyMethod) == null) {
          PoliciesGrpc.getGetPolicyMethod =
              getGetPolicyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.iam.v2.GetPolicyRequest, com.google.iam.v2.Policy>newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetPolicy"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.GetPolicyRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.Policy.getDefaultInstance()))
                      .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("GetPolicy"))
                      .build();
        }
      }
    }
    return getGetPolicyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.iam.v2.CreatePolicyRequest, com.google.longrunning.Operation>
      getCreatePolicyMethod;

  // CreatePolicy is a long-running operation: the response type is
  // google.longrunning.Operation rather than the created Policy itself.
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreatePolicy",
      requestType = com.google.iam.v2.CreatePolicyRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.iam.v2.CreatePolicyRequest, com.google.longrunning.Operation>
      getCreatePolicyMethod() {
    io.grpc.MethodDescriptor<
            com.google.iam.v2.CreatePolicyRequest, com.google.longrunning.Operation>
        getCreatePolicyMethod;
    if ((getCreatePolicyMethod = PoliciesGrpc.getCreatePolicyMethod) == null) {
      synchronized (PoliciesGrpc.class) {
        if ((getCreatePolicyMethod = PoliciesGrpc.getCreatePolicyMethod) == null) {
          PoliciesGrpc.getCreatePolicyMethod =
              getCreatePolicyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.iam.v2.CreatePolicyRequest, com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreatePolicy"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.CreatePolicyRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("CreatePolicy"))
                      .build();
        }
      }
    }
    return getCreatePolicyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.iam.v2.UpdatePolicyRequest, com.google.longrunning.Operation>
      getUpdatePolicyMethod;

  // UpdatePolicy is also a long-running operation (see CreatePolicy above).
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdatePolicy",
      requestType = com.google.iam.v2.UpdatePolicyRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.iam.v2.UpdatePolicyRequest, com.google.longrunning.Operation>
      getUpdatePolicyMethod() {
    io.grpc.MethodDescriptor<
            com.google.iam.v2.UpdatePolicyRequest, com.google.longrunning.Operation>
        getUpdatePolicyMethod;
    if ((getUpdatePolicyMethod = PoliciesGrpc.getUpdatePolicyMethod) == null) {
      synchronized (PoliciesGrpc.class) {
        if ((getUpdatePolicyMethod = PoliciesGrpc.getUpdatePolicyMethod) == null) {
          PoliciesGrpc.getUpdatePolicyMethod =
              getUpdatePolicyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.iam.v2.UpdatePolicyRequest, com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdatePolicy"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.UpdatePolicyRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("UpdatePolicy"))
                      .build();
        }
      }
    }
    return getUpdatePolicyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.iam.v2.DeletePolicyRequest, com.google.longrunning.Operation>
      getDeletePolicyMethod;

  // DeletePolicy is a long-running operation as well.
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeletePolicy",
      requestType = com.google.iam.v2.DeletePolicyRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.iam.v2.DeletePolicyRequest, com.google.longrunning.Operation>
      getDeletePolicyMethod() {
    io.grpc.MethodDescriptor<
            com.google.iam.v2.DeletePolicyRequest, com.google.longrunning.Operation>
        getDeletePolicyMethod;
    if ((getDeletePolicyMethod = PoliciesGrpc.getDeletePolicyMethod) == null) {
      synchronized (PoliciesGrpc.class) {
        if ((getDeletePolicyMethod = PoliciesGrpc.getDeletePolicyMethod) == null) {
          PoliciesGrpc.getDeletePolicyMethod =
              getDeletePolicyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.iam.v2.DeletePolicyRequest, com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeletePolicy"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.iam.v2.DeletePolicyRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("DeletePolicy"))
                      .build();
        }
      }
    }
    return getDeletePolicyMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static PoliciesStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PoliciesStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PoliciesStub>() {
          @java.lang.Override
          public PoliciesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PoliciesStub(channel, callOptions);
          }
        };
    return PoliciesStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static PoliciesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingV2Stub>() {
          @java.lang.Override
          public PoliciesBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PoliciesBlockingV2Stub(channel, callOptions);
          }
        };
    return PoliciesBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static PoliciesBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingStub>() {
          @java.lang.Override
          public PoliciesBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PoliciesBlockingStub(channel, callOptions);
          }
        };
    return PoliciesBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static PoliciesFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PoliciesFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PoliciesFutureStub>() {
          @java.lang.Override
          public PoliciesFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PoliciesFutureStub(channel, callOptions);
          }
        };
    return PoliciesFutureStub.newStub(factory, channel);
  }

  // Server-side contract. Each method has a default implementation that
  // responds with UNIMPLEMENTED, so implementors only override the RPCs
  // they actually support.
  /**
   *
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Retrieves the policies of the specified kind that are attached to a
     * resource.
     * The response lists only policy metadata. In particular, policy rules are
     * omitted.
     * </pre>
     */
    default void listPolicies(
        com.google.iam.v2.ListPoliciesRequest request,
        io.grpc.stub.StreamObserver<com.google.iam.v2.ListPoliciesResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListPoliciesMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a policy.
     * </pre>
     */
    default void getPolicy(
        com.google.iam.v2.GetPolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.iam.v2.Policy> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetPolicyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a policy.
     * </pre>
     */
    default void createPolicy(
        com.google.iam.v2.CreatePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreatePolicyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified policy.
     * You can update only the rules and the display name for the policy.
     * To update a policy, you should use a read-modify-write loop:
     * 1. Use [GetPolicy][google.iam.v2.Policies.GetPolicy] to read the current version of the policy.
     * 2. Modify the policy as needed.
     * 3. Use `UpdatePolicy` to write the updated policy.
     * This pattern helps prevent conflicts between concurrent updates.
     * </pre>
     */
    default void updatePolicy(
        com.google.iam.v2.UpdatePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdatePolicyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a policy. This action is permanent.
     * </pre>
     */
    default void deletePolicy(
        com.google.iam.v2.DeletePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeletePolicyMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service Policies.
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public abstract static class PoliciesImplBase implements io.grpc.BindableService, AsyncService {

    // Binds this implementation's handlers into a ServerServiceDefinition
    // via the static helper elsewhere in this class.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return PoliciesGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service Policies.
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public static final class PoliciesStub extends io.grpc.stub.AbstractAsyncStub<PoliciesStub> {

    private PoliciesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PoliciesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PoliciesStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the policies of the specified kind that are attached to a
     * resource.
     * The response lists only policy metadata. In particular, policy rules are
     * omitted.
     * </pre>
     */
    public void listPolicies(
        com.google.iam.v2.ListPoliciesRequest request,
        io.grpc.stub.StreamObserver<com.google.iam.v2.ListPoliciesResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListPoliciesMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a policy.
     * </pre>
     */
    public void getPolicy(
        com.google.iam.v2.GetPolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.iam.v2.Policy> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetPolicyMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a policy.
     * </pre>
     */
    public void createPolicy(
        com.google.iam.v2.CreatePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreatePolicyMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified policy.
     * You can update only the rules and the display name for the policy.
     * To update a policy, you should use a read-modify-write loop:
     * 1. Use [GetPolicy][google.iam.v2.Policies.GetPolicy] to read the current version of the policy.
     * 2. Modify the policy as needed.
     * 3. Use `UpdatePolicy` to write the updated policy.
     * This pattern helps prevent conflicts between concurrent updates.
     * </pre>
     */
    public void updatePolicy(
        com.google.iam.v2.UpdatePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdatePolicyMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a policy. This action is permanent.
     * </pre>
     */
    public void deletePolicy(
        com.google.iam.v2.DeletePolicyRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeletePolicyMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service Policies.
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public static final class PoliciesBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<PoliciesBlockingV2Stub> {

    private PoliciesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PoliciesBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PoliciesBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the policies of the specified kind that are attached to a
     * resource.
     * The response lists only policy metadata. In particular, policy rules are
     * omitted.
     * </pre>
     */
    public com.google.iam.v2.ListPoliciesResponse listPolicies(
        com.google.iam.v2.ListPoliciesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListPoliciesMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a policy.
     * </pre>
     */
    public com.google.iam.v2.Policy getPolicy(com.google.iam.v2.GetPolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetPolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a policy.
     * </pre>
     */
    public com.google.longrunning.Operation createPolicy(
        com.google.iam.v2.CreatePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreatePolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified policy.
     * You can update only the rules and the display name for the policy.
     * To update a policy, you should use a read-modify-write loop:
     * 1. Use [GetPolicy][google.iam.v2.Policies.GetPolicy] to read the current version of the policy.
     * 2. Modify the policy as needed.
     * 3. Use `UpdatePolicy` to write the updated policy.
     * This pattern helps prevent conflicts between concurrent updates.
     * </pre>
     */
    public com.google.longrunning.Operation updatePolicy(
        com.google.iam.v2.UpdatePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdatePolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a policy. This action is permanent.
     * </pre>
     */
    public com.google.longrunning.Operation deletePolicy(
        com.google.iam.v2.DeletePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeletePolicyMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service Policies.
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public static final class PoliciesBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<PoliciesBlockingStub> {

    private PoliciesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PoliciesBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PoliciesBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the policies of the specified kind that are attached to a
     * resource.
     * The response lists only policy metadata. In particular, policy rules are
     * omitted.
     * </pre>
     */
    public com.google.iam.v2.ListPoliciesResponse listPolicies(
        com.google.iam.v2.ListPoliciesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListPoliciesMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a policy.
     * </pre>
     */
    public com.google.iam.v2.Policy getPolicy(com.google.iam.v2.GetPolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetPolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a policy.
     * </pre>
     */
    public com.google.longrunning.Operation createPolicy(
        com.google.iam.v2.CreatePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreatePolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified policy.
     * You can update only the rules and the display name for the policy.
     * To update a policy, you should use a read-modify-write loop:
     * 1. Use [GetPolicy][google.iam.v2.Policies.GetPolicy] to read the current version of the policy.
     * 2. Modify the policy as needed.
     * 3. Use `UpdatePolicy` to write the updated policy.
     * This pattern helps prevent conflicts between concurrent updates.
     * </pre>
     */
    public com.google.longrunning.Operation updatePolicy(
        com.google.iam.v2.UpdatePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdatePolicyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a policy. This action is permanent.
     * </pre>
     */
    public com.google.longrunning.Operation deletePolicy(
        com.google.iam.v2.DeletePolicyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeletePolicyMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service Policies.
   *
   * <pre>
   * An interface for managing Identity and Access Management (IAM) policies.
   * </pre>
   */
  public static final class PoliciesFutureStub
      extends io.grpc.stub.AbstractFutureStub<PoliciesFutureStub> {

    private PoliciesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PoliciesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PoliciesFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the policies of the specified kind that are attached to a
     * resource.
     * The response lists only policy metadata. In particular, policy rules are
     * omitted.
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.iam.v2.ListPoliciesResponse> listPolicies(com.google.iam.v2.ListPoliciesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListPoliciesMethod(), getCallOptions()), request); } /** * * * <pre> * Gets a policy. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.iam.v2.Policy> getPolicy( com.google.iam.v2.GetPolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetPolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a policy. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> createPolicy(com.google.iam.v2.CreatePolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreatePolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> updatePolicy(com.google.iam.v2.UpdatePolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdatePolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes a policy. This action is permanent. 
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deletePolicy(com.google.iam.v2.DeletePolicyRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeletePolicyMethod(), getCallOptions()), request);
    }
  }

  // Dispatch ids: one per RPC, consumed by MethodHandlers.invoke below.
  private static final int METHODID_LIST_POLICIES = 0;
  private static final int METHODID_GET_POLICY = 1;
  private static final int METHODID_CREATE_POLICY = 2;
  private static final int METHODID_UPDATE_POLICY = 3;
  private static final int METHODID_DELETE_POLICY = 4;

  /**
   * Routes an incoming request to the matching {@code AsyncService} method based on
   * the METHODID_* constant captured at construction time.
   */
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_LIST_POLICIES:
          serviceImpl.listPolicies(
              (com.google.iam.v2.ListPoliciesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.iam.v2.ListPoliciesResponse>)
                  responseObserver);
          break;
        case METHODID_GET_POLICY:
          serviceImpl.getPolicy(
              (com.google.iam.v2.GetPolicyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.iam.v2.Policy>) responseObserver);
          break;
        case METHODID_CREATE_POLICY:
          serviceImpl.createPolicy(
              (com.google.iam.v2.CreatePolicyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_UPDATE_POLICY:
          serviceImpl.updatePolicy(
              (com.google.iam.v2.UpdatePolicyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_POLICY:
          serviceImpl.deletePolicy(
              (com.google.iam.v2.DeletePolicyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    // All five RPCs of this service are unary, so no streaming method id is ever
    // dispatched here.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getListPoliciesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.iam.v2.ListPoliciesRequest,
                    com.google.iam.v2.ListPoliciesResponse>(service, METHODID_LIST_POLICIES)))
        .addMethod(
            getGetPolicyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<com.google.iam.v2.GetPolicyRequest, com.google.iam.v2.Policy>(
                    service, METHODID_GET_POLICY)))
        .addMethod(
            getCreatePolicyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.iam.v2.CreatePolicyRequest, com.google.longrunning.Operation>(
                    service, METHODID_CREATE_POLICY)))
        .addMethod(
            getUpdatePolicyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.iam.v2.UpdatePolicyRequest, com.google.longrunning.Operation>(
                    service, METHODID_UPDATE_POLICY)))
        .addMethod(
            getDeletePolicyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.iam.v2.DeletePolicyRequest, com.google.longrunning.Operation>(
                    service, METHODID_DELETE_POLICY)))
        .build();
  }

  private abstract static class PoliciesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    PoliciesBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.iam.v2.PolicyProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("Policies");
    }
  }

  private static final class PoliciesFileDescriptorSupplier
      extends PoliciesBaseDescriptorSupplier {
    PoliciesFileDescriptorSupplier() {}
  }

  private static final class PoliciesMethodDescriptorSupplier
      extends PoliciesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    PoliciesMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the descriptor via double-checked locking on the volatile
  // serviceDescriptor field.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (PoliciesGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new PoliciesFileDescriptorSupplier())
                      .addMethod(getListPoliciesMethod())
                      .addMethod(getGetPolicyMethod())
                      .addMethod(getCreatePolicyMethod())
                      .addMethod(getUpdatePolicyMethod())
                      .addMethod(getDeletePolicyMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
// NOTE(review): the three lines below are stray dataset metadata (repository id,
// file size, and the path of the next concatenated file). They were bare text that
// cannot compile as Java; preserved here as comments rather than deleted.
// repo_id: googleapis/google-cloud-java
// size: 35,515
// file_path: java-configdelivery/proto-google-cloud-configdelivery-v1/src/main/java/com/google/cloud/configdelivery/v1/DeleteResourceBundleRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/configdelivery/v1/config_delivery.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.configdelivery.v1;

/**
 * <pre>
 * Message for deleting a ResourceBundle
 * </pre>
 *
 * Protobuf type {@code google.cloud.configdelivery.v1.DeleteResourceBundleRequest}
 */
public final class DeleteResourceBundleRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1.DeleteResourceBundleRequest)
    DeleteResourceBundleRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use DeleteResourceBundleRequest.newBuilder() to construct.
  private DeleteResourceBundleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DeleteResourceBundleRequest() {
    name_ = "";
    requestId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteResourceBundleRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1_DeleteResourceBundleRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1_DeleteResourceBundleRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.class,
            com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   * <pre>
   * Required. Name of the resource
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   * <pre>
   * Required. Name of the resource
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int REQUEST_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   * Optional. Idempotency token: a unique request ID (valid UUID; the zero UUID
   * 00000000-0000-0000-0000-000000000000 is not supported) that lets the server
   * recognize and ignore a retried request for at least 60 minutes after the
   * first request, preventing accidental duplicate commitments.
   *
   * <code>
   * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
   * </code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   * Optional. Idempotency token; see {@link #getRequestId()} for semantics.
   *
   * <code>
   * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
   * </code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FORCE_FIELD_NUMBER = 3;
  private boolean force_ = false;

  /**
   * <pre>
   * Optional. If set to true, any releases of this resource bundle will also be
   * deleted. (Otherwise, the request will only work if the resource bundle has
   * no releases.)
   * </pre>
   *
   * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }

  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Writes only non-default fields, per proto3 wire-format rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    if (force_ != false) {
      output.writeBool(3, force_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    if (force_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest other =
        (com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (getForce() != other.getForce()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points for every supported input form.
  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * Message for deleting a ResourceBundle
   * </pre>
   *
   * Protobuf type {@code google.cloud.configdelivery.v1.DeleteResourceBundleRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1.DeleteResourceBundleRequest)
      com.google.cloud.configdelivery.v1.DeleteResourceBundleRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
          .internal_static_google_cloud_configdelivery_v1_DeleteResourceBundleRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
          .internal_static_google_cloud_configdelivery_v1_DeleteResourceBundleRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.class,
              com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.Builder.class);
    }

    // Construct using com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      requestId_ = "";
      force_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
          .internal_static_google_cloud_configdelivery_v1_DeleteResourceBundleRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest
        getDefaultInstanceForType() {
      return com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest build() {
      com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest buildPartial() {
      com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest result =
          new com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set in bitField0_.
    private void buildPartial0(
        com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.force_ = force_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest) {
        return mergeFrom((com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest other) {
      if (other
          == com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                force_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object name_ = "";

    /**
     * <pre>
     * Required. Name of the resource
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Required. Name of the resource
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Required. Name of the resource
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Required. Name of the resource
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Required. Name of the resource
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object requestId_ = "";

    /**
     * Optional. Idempotency token: a unique request ID (valid UUID; the zero UUID
     * is not supported) that lets the server recognize and ignore a retried
     * request for at least 60 minutes after the first request, preventing
     * accidental duplicate commitments.
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Optional. Idempotency token; see {@link #getRequestId()} for semantics.
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Optional. Idempotency token; see {@link #getRequestId()} for semantics.
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Optional. Idempotency token; see {@link #getRequestId()} for semantics.
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Optional. Idempotency token; see {@link #getRequestId()} for semantics.
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private boolean force_;

    /**
     * <pre>
     * Optional. If set to true, any releases of this resource bundle will also be
     * deleted. (Otherwise, the request will only work if the resource bundle has
     * no releases.)
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }

    /**
     * <pre>
     * Optional. If set to true, any releases of this resource bundle will also be
     * deleted. (Otherwise, the request will only work if the resource bundle has
     * no releases.)
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {

      force_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. If set to true, any releases of this resource bundle will also be
     * deleted. (Otherwise, the request will only work if the resource bundle has
     * no releases.)
* </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000004); force_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1.DeleteResourceBundleRequest) } // @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1.DeleteResourceBundleRequest) private static final com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest(); } public static com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteResourceBundleRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteResourceBundleRequest>() { @java.lang.Override public DeleteResourceBundleRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeleteResourceBundleRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteResourceBundleRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.configdelivery.v1.DeleteResourceBundleRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/druid
35,574
services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.server; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; import com.google.inject.Provider; import org.apache.calcite.avatica.remote.ProtobufTranslation; import org.apache.calcite.avatica.remote.ProtobufTranslationImpl; import org.apache.calcite.avatica.remote.Service; import org.apache.commons.io.IOUtils; import org.apache.druid.client.selector.Server; import org.apache.druid.guice.annotations.Json; import org.apache.druid.guice.annotations.Smile; import org.apache.druid.guice.http.DruidHttpClientConfig; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.query.BaseQuery; import 
org.apache.druid.query.DruidMetrics; import org.apache.druid.query.GenericQueryMetricsFactory; import org.apache.druid.query.Query; import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryMetrics; import org.apache.druid.query.QueryToolChestWarehouse; import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.initialization.jetty.HttpException; import org.apache.druid.server.initialization.jetty.StandardResponseHeaderFilterHolder; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.metrics.QueryCountStatsProvider; import org.apache.druid.server.router.QueryHostFinder; import org.apache.druid.server.router.Router; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authenticator; import org.apache.druid.server.security.AuthenticatorMapper; import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.sql.http.SqlQuery; import org.apache.druid.sql.http.SqlResource; import org.eclipse.jetty.client.BytesRequestContent; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.Request; import org.eclipse.jetty.client.Response; import org.eclipse.jetty.client.Result; import org.eclipse.jetty.ee8.proxy.AsyncProxyServlet; import org.eclipse.jetty.http.HttpHeader; import org.eclipse.jetty.http.HttpMethod; import javax.annotation.Nullable; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * This class does async query processing and should be merged with QueryResource at 
some point */ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements QueryCountStatsProvider { private static final EmittingLogger LOG = new EmittingLogger(AsyncQueryForwardingServlet.class); @Deprecated // use SmileMediaTypes.APPLICATION_JACKSON_SMILE private static final String APPLICATION_SMILE = "application/smile"; private static final String AVATICA_CONNECTION_ID = "connectionId"; private static final String AVATICA_STATEMENT_HANDLE = "statementHandle"; private static final String HOST_ATTRIBUTE = "org.apache.druid.proxy.to.host"; private static final String SCHEME_ATTRIBUTE = "org.apache.druid.proxy.to.host.scheme"; private static final String QUERY_ATTRIBUTE = "org.apache.druid.proxy.query"; private static final String AVATICA_QUERY_ATTRIBUTE = "org.apache.druid.proxy.avaticaQuery"; private static final String SQL_QUERY_ATTRIBUTE = "org.apache.druid.proxy.sqlQuery"; private static final String OBJECTMAPPER_ATTRIBUTE = "org.apache.druid.proxy.objectMapper"; private static final String PROPERTY_SQL_ENABLE = "druid.router.sql.enable"; private static final String PROPERTY_SQL_ENABLE_DEFAULT = "false"; private static final long CANCELLATION_TIMEOUT_MILLIS = TimeUnit.SECONDS.toMillis(5); private final AtomicLong successfulQueryCount = new AtomicLong(); private final AtomicLong failedQueryCount = new AtomicLong(); private final AtomicLong interruptedQueryCount = new AtomicLong(); @VisibleForTesting void handleException(HttpServletResponse response, ObjectMapper objectMapper, Exception exception) throws IOException { QueryInterruptedException exceptionToReport = QueryInterruptedException.wrapIfNeeded(exception); LOG.warn(exceptionToReport, "Unexpected exception occurs"); if (!response.isCommitted()) { response.resetBuffer(); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); objectMapper.writeValue( response.getOutputStream(), serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(exceptionToReport) ); } 
response.flushBuffer(); } private final QueryToolChestWarehouse warehouse; private final ObjectMapper jsonMapper; private final ObjectMapper smileMapper; private final QueryHostFinder hostFinder; private final Provider<HttpClient> httpClientProvider; private final DruidHttpClientConfig httpClientConfig; private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final GenericQueryMetricsFactory queryMetricsFactory; private final AuthenticatorMapper authenticatorMapper; private final ProtobufTranslation protobufTranslation; private final ServerConfig serverConfig; private final boolean routeSqlByStrategy; private HttpClient broadcastClient; @Inject public AsyncQueryForwardingServlet( QueryToolChestWarehouse warehouse, @Json ObjectMapper jsonMapper, @Smile ObjectMapper smileMapper, QueryHostFinder hostFinder, @Router Provider<HttpClient> httpClientProvider, @Router DruidHttpClientConfig httpClientConfig, ServiceEmitter emitter, RequestLogger requestLogger, GenericQueryMetricsFactory queryMetricsFactory, AuthenticatorMapper authenticatorMapper, Properties properties, final ServerConfig serverConfig ) { this.warehouse = warehouse; this.jsonMapper = jsonMapper; this.smileMapper = smileMapper; this.hostFinder = hostFinder; this.httpClientProvider = httpClientProvider; this.httpClientConfig = httpClientConfig; this.emitter = emitter; this.requestLogger = requestLogger; this.queryMetricsFactory = queryMetricsFactory; this.authenticatorMapper = authenticatorMapper; this.protobufTranslation = new ProtobufTranslationImpl(); this.routeSqlByStrategy = Boolean.parseBoolean( properties.getProperty(PROPERTY_SQL_ENABLE, PROPERTY_SQL_ENABLE_DEFAULT) ); this.serverConfig = serverConfig; } @Override public void init() throws ServletException { super.init(); // Note that httpClientProvider is setup to return same HttpClient instance on each get() so // it is same http client as that is used by parent ProxyServlet. 
broadcastClient = newHttpClient(); try { broadcastClient.start(); } catch (Exception e) { throw new ServletException(e); } } @Override public void destroy() { super.destroy(); try { broadcastClient.stop(); } catch (Exception e) { LOG.warn(e, "Error stopping servlet"); } } @Override protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(request.getContentType()) || APPLICATION_SMILE.equals(request.getContentType()); final ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper; request.setAttribute(OBJECTMAPPER_ATTRIBUTE, objectMapper); final String requestURI = request.getRequestURI(); final String method = request.getMethod(); final Server targetServer; // The Router does not have the ability to look inside SQL queries and route them intelligently, so just treat // them as a generic request. final boolean isNativeQueryEndpoint = requestURI.startsWith("/druid/v2") && !requestURI.startsWith("/druid/v2/sql"); final boolean isSqlQueryEndpoint = requestURI.startsWith("/druid/v2/sql"); final boolean isAvaticaJson = requestURI.startsWith("/druid/v2/sql/avatica"); final boolean isAvaticaPb = requestURI.startsWith("/druid/v2/sql/avatica-protobuf"); if (isAvaticaPb) { byte[] requestBytes = IOUtils.toByteArray(request.getInputStream()); Service.Request protobufRequest = this.protobufTranslation.parseRequest(requestBytes); String connectionId = getAvaticaProtobufConnectionId(protobufRequest); targetServer = hostFinder.findServerAvatica(connectionId); request.setAttribute(AVATICA_QUERY_ATTRIBUTE, requestBytes); LOG.debug("Forwarding protobuf JDBC connection [%s] to broker [%s]", connectionId, targetServer); } else if (isAvaticaJson) { Map<String, Object> requestMap = objectMapper.readValue( request.getInputStream(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); String connectionId = getAvaticaConnectionId(requestMap); targetServer 
= hostFinder.findServerAvatica(connectionId); byte[] requestBytes = objectMapper.writeValueAsBytes(requestMap); request.setAttribute(AVATICA_QUERY_ATTRIBUTE, requestBytes); LOG.debug("Forwarding JDBC connection [%s] to broker [%s]", connectionId, targetServer.getHost()); } else if (HttpMethod.DELETE.is(method)) { // query cancellation request targetServer = hostFinder.pickDefaultServer(); broadcastQueryCancelRequest(request, targetServer); LOG.debug("Broadcasting cancellation request to all brokers"); } else if (isNativeQueryEndpoint && HttpMethod.POST.is(method)) { // query request try { Query inputQuery = objectMapper.readValue(request.getInputStream(), Query.class); if (inputQuery != null) { targetServer = hostFinder.pickServer(inputQuery); if (inputQuery.getId() == null) { inputQuery = inputQuery.withId(UUID.randomUUID().toString()); } LOG.debug("Forwarding JSON query [%s] to broker [%s]", inputQuery.getId(), targetServer.getHost()); } else { targetServer = hostFinder.pickDefaultServer(); LOG.debug("Forwarding JSON request to broker [%s]", targetServer.getHost()); } request.setAttribute(QUERY_ATTRIBUTE, inputQuery); } catch (IOException e) { handleQueryParseException(request, response, objectMapper, e, true); return; } catch (Exception e) { handleException(response, objectMapper, e); return; } } else if (isSqlQueryEndpoint && HttpMethod.POST.is(method)) { try { SqlQuery inputSqlQuery = SqlQuery.from(request, objectMapper); inputSqlQuery = buildSqlQueryWithId(inputSqlQuery); request.setAttribute(SQL_QUERY_ATTRIBUTE, inputSqlQuery); if (routeSqlByStrategy) { targetServer = hostFinder.findServerSql(inputSqlQuery); } else { targetServer = hostFinder.pickDefaultServer(); } LOG.debug("Forwarding SQL query to broker [%s]", targetServer.getHost()); } catch (HttpException e) { handleQueryParseException(request, response, e.getStatusCode().getStatusCode(), objectMapper, e, false); return; } catch (Exception e) { handleException(response, objectMapper, e); return; } } 
else { targetServer = hostFinder.pickDefaultServer(); LOG.debug("Forwarding query to broker [%s]", targetServer.getHost()); } request.setAttribute(HOST_ATTRIBUTE, targetServer.getHost()); request.setAttribute(SCHEME_ATTRIBUTE, targetServer.getScheme()); doService(request, response); } /** * Rebuilds the {@link SqlQuery} object with sqlQueryId and queryId context parameters if not present * * @param sqlQuery the original SqlQuery * @return an updated sqlQuery object with sqlQueryId and queryId context parameters */ private SqlQuery buildSqlQueryWithId(SqlQuery sqlQuery) { Map<String, Object> context = new HashMap<>(sqlQuery.getContext()); String sqlQueryId = (String) context.getOrDefault(BaseQuery.SQL_QUERY_ID, UUID.randomUUID().toString()); // set queryId to sqlQueryId if not overridden String queryId = (String) context.getOrDefault(BaseQuery.QUERY_ID, sqlQueryId); context.put(BaseQuery.SQL_QUERY_ID, sqlQueryId); context.put(BaseQuery.QUERY_ID, queryId); return sqlQuery.withOverridenContext(context); } /** * Issues async query cancellation requests to all Brokers (except the given * targetServer). Query cancellation on the targetServer is handled by the * proxy servlet. 
*/ private void broadcastQueryCancelRequest(HttpServletRequest request, Server targetServer) { for (final Server server : hostFinder.getAllServers()) { if (server.getHost().equals(targetServer.getHost())) { continue; } // issue async requests Response.CompleteListener completeListener = result -> { if (result.isFailed()) { LOG.noStackTrace().info( result.getFailure(), "Failed to forward cancellation request to [%s]", server.getHost() ); } }; Request broadcastReq = broadcastClient .newRequest(rewriteURI(request, server.getScheme(), server.getHost())) .method(HttpMethod.DELETE) .timeout(CANCELLATION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); copyRequestHeaders(request, broadcastReq); broadcastReq.send(completeListener); } interruptedQueryCount.incrementAndGet(); } @VisibleForTesting void handleQueryParseException( HttpServletRequest request, HttpServletResponse response, ObjectMapper objectMapper, Throwable parseException, boolean isNativeQuery ) throws IOException { handleQueryParseException(request, response, HttpServletResponse.SC_BAD_REQUEST, objectMapper, parseException, isNativeQuery); } private void handleQueryParseException( HttpServletRequest request, HttpServletResponse response, int httpStatusCode, ObjectMapper objectMapper, Throwable parseException, boolean isNativeQuery ) throws IOException { QueryInterruptedException exceptionToReport = QueryInterruptedException.wrapIfNeeded(parseException); LOG.warn(exceptionToReport, "Exception parsing query"); // Log the error message final String errorMessage = exceptionToReport.getMessage() == null ? 
"no error message" : exceptionToReport.getMessage(); AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(request); if (isNativeQuery) { requestLogger.logNativeQuery( RequestLogLine.forNative( null, DateTimes.nowUtc(), request.getRemoteAddr(), new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage, "identity", authenticationResult.getIdentity())) ) ); } else { requestLogger.logSqlQuery( RequestLogLine.forSql( null, null, DateTimes.nowUtc(), request.getRemoteAddr(), new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage, "identity", authenticationResult.getIdentity())) ) ); } // Write to the response response.setStatus(httpStatusCode); response.setContentType(MediaType.APPLICATION_JSON); objectMapper.writeValue( response.getOutputStream(), serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(exceptionToReport) ); } protected void doService( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { // Just call the superclass service method. Overridden in tests. 
super.service(request, response); } @Override protected void sendProxyRequest( HttpServletRequest clientRequest, HttpServletResponse proxyResponse, Request proxyRequest ) { proxyRequest.timeout(httpClientConfig.getReadTimeout().getMillis(), TimeUnit.MILLISECONDS); proxyRequest.idleTimeout(httpClientConfig.getReadTimeout().getMillis(), TimeUnit.MILLISECONDS); byte[] avaticaQuery = (byte[]) clientRequest.getAttribute(AVATICA_QUERY_ATTRIBUTE); if (avaticaQuery != null) { proxyRequest.body(new BytesRequestContent(avaticaQuery)); } final Query query = (Query) clientRequest.getAttribute(QUERY_ATTRIBUTE); final SqlQuery sqlQuery = (SqlQuery) clientRequest.getAttribute(SQL_QUERY_ATTRIBUTE); if (query != null) { setProxyRequestContent(proxyRequest, clientRequest, query); } else if (sqlQuery != null) { setProxyRequestContent(proxyRequest, clientRequest, sqlQuery); } // Since we can't see the request object on the remote side, we can't check whether the remote side actually // performed an authorization check here, so always set this to true for the proxy servlet. // If the remote node failed to perform an authorization check, PreResponseAuthorizationCheckFilter // will log that on the remote node. clientRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); // Check if there is an authentication result and use it to decorate the proxy request if needed. 
AuthenticationResult authenticationResult = (AuthenticationResult) clientRequest.getAttribute( AuthConfig.DRUID_AUTHENTICATION_RESULT); if (authenticationResult != null && authenticationResult.getAuthenticatedBy() != null) { Authenticator authenticator = authenticatorMapper.getAuthenticatorMap() .get(authenticationResult.getAuthenticatedBy()); if (authenticator != null) { authenticator.decorateProxyRequest( clientRequest, proxyResponse, proxyRequest ); } else { LOG.error("Can not find Authenticator with Name [%s]", authenticationResult.getAuthenticatedBy()); } } super.sendProxyRequest( clientRequest, proxyResponse, proxyRequest ); } private void setProxyRequestContent(Request proxyRequest, HttpServletRequest clientRequest, Object content) { final ObjectMapper objectMapper = (ObjectMapper) clientRequest.getAttribute(OBJECTMAPPER_ATTRIBUTE); try { byte[] bytes = objectMapper.writeValueAsBytes(content); Request.Content requestContent = new BytesRequestContent(bytes); proxyRequest.body(requestContent); proxyRequest.headers(headers -> { headers.put(HttpHeader.CONTENT_LENGTH, String.valueOf(requestContent.getLength())); headers.put(HttpHeader.CONTENT_TYPE, objectMapper.getFactory() instanceof SmileFactory ? 
SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON); }); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } @Override protected Response.Listener newProxyResponseListener(HttpServletRequest request, HttpServletResponse response) { boolean isJDBC = request.getAttribute(AVATICA_QUERY_ATTRIBUTE) != null; return newMetricsEmittingProxyResponseListener( request, response, (Query) request.getAttribute(QUERY_ATTRIBUTE), (SqlQuery) request.getAttribute(SQL_QUERY_ATTRIBUTE), isJDBC, System.nanoTime() ); } @Override protected String rewriteTarget(HttpServletRequest request) { return rewriteURI( request, (String) request.getAttribute(SCHEME_ATTRIBUTE), (String) request.getAttribute(HOST_ATTRIBUTE) ); } protected String rewriteURI(HttpServletRequest request, String scheme, String host) { return makeURI(scheme, host, request.getRequestURI(), request.getQueryString()); } @VisibleForTesting static String makeURI(String scheme, String host, String requestURI, String rawQueryString) { return JettyUtils.concatenateForRewrite( scheme + "://" + host, requestURI, rawQueryString ); } @Override protected HttpClient newHttpClient() { return httpClientProvider.get(); } @Override protected HttpClient createHttpClient() throws ServletException { HttpClient client = super.createHttpClient(); // override timeout set in ProxyServlet.createHttpClient setTimeout(httpClientConfig.getReadTimeout().getMillis()); return client; } private Response.Listener newMetricsEmittingProxyResponseListener( HttpServletRequest request, HttpServletResponse response, @Nullable Query query, @Nullable SqlQuery sqlQuery, boolean isJDBC, long startNs ) { return new MetricsEmittingProxyResponseListener(request, response, query, sqlQuery, isJDBC, startNs); } @Override public long getSuccessfulQueryCount() { return successfulQueryCount.get(); } @Override public long getFailedQueryCount() { return failedQueryCount.get(); } @Override public long getInterruptedQueryCount() { return 
interruptedQueryCount.get(); } @Override public long getTimedOutQueryCount() { // Query timeout metric is not relevant here and this metric is already being tracked in the Broker and the // data nodes using QueryResource return 0L; } @Override protected void onServerResponseHeaders( HttpServletRequest clientRequest, HttpServletResponse proxyResponse, Response serverResponse ) { StandardResponseHeaderFilterHolder.deduplicateHeadersInProxyServlet(proxyResponse, serverResponse); super.onServerResponseHeaders(clientRequest, proxyResponse, serverResponse); } @VisibleForTesting static String getAvaticaConnectionId(Map<String, Object> requestMap) { // avatica commands always have a 'connectionId'. If commands are not part of a prepared statement, this appears at // the top level of the request, but if it is part of a statement, then it will be nested in the 'statementHandle'. // see https://calcite.apache.org/avatica/docs/json_reference.html#requests for more details Object connectionIdObj = requestMap.get(AVATICA_CONNECTION_ID); if (connectionIdObj == null) { Object statementHandle = requestMap.get(AVATICA_STATEMENT_HANDLE); if (statementHandle != null && statementHandle instanceof Map) { connectionIdObj = ((Map) statementHandle).get(AVATICA_CONNECTION_ID); } } if (connectionIdObj == null) { throw new IAE("Received an Avatica request without a %s.", AVATICA_CONNECTION_ID); } if (!(connectionIdObj instanceof String)) { throw new IAE("Received an Avatica request with a non-String %s.", AVATICA_CONNECTION_ID); } return (String) connectionIdObj; } static String getAvaticaProtobufConnectionId(Service.Request request) { if (request instanceof Service.CatalogsRequest) { return ((Service.CatalogsRequest) request).connectionId; } if (request instanceof Service.SchemasRequest) { return ((Service.SchemasRequest) request).connectionId; } if (request instanceof Service.TablesRequest) { return ((Service.TablesRequest) request).connectionId; } if (request instanceof 
Service.TypeInfoRequest) { return ((Service.TypeInfoRequest) request).connectionId; } if (request instanceof Service.ColumnsRequest) { return ((Service.ColumnsRequest) request).connectionId; } if (request instanceof Service.ExecuteRequest) { return ((Service.ExecuteRequest) request).statementHandle.connectionId; } if (request instanceof Service.TableTypesRequest) { return ((Service.TableTypesRequest) request).connectionId; } if (request instanceof Service.PrepareRequest) { return ((Service.PrepareRequest) request).connectionId; } if (request instanceof Service.PrepareAndExecuteRequest) { return ((Service.PrepareAndExecuteRequest) request).connectionId; } if (request instanceof Service.FetchRequest) { return ((Service.FetchRequest) request).connectionId; } if (request instanceof Service.CreateStatementRequest) { return ((Service.CreateStatementRequest) request).connectionId; } if (request instanceof Service.CloseStatementRequest) { return ((Service.CloseStatementRequest) request).connectionId; } if (request instanceof Service.OpenConnectionRequest) { return ((Service.OpenConnectionRequest) request).connectionId; } if (request instanceof Service.CloseConnectionRequest) { return ((Service.CloseConnectionRequest) request).connectionId; } if (request instanceof Service.ConnectionSyncRequest) { return ((Service.ConnectionSyncRequest) request).connectionId; } if (request instanceof Service.DatabasePropertyRequest) { return ((Service.DatabasePropertyRequest) request).connectionId; } if (request instanceof Service.SyncResultsRequest) { return ((Service.SyncResultsRequest) request).connectionId; } if (request instanceof Service.CommitRequest) { return ((Service.CommitRequest) request).connectionId; } if (request instanceof Service.RollbackRequest) { return ((Service.RollbackRequest) request).connectionId; } if (request instanceof Service.PrepareAndExecuteBatchRequest) { return ((Service.PrepareAndExecuteBatchRequest) request).connectionId; } if (request instanceof 
Service.ExecuteBatchRequest) {
      return ((Service.ExecuteBatchRequest) request).connectionId;
    }
    // Every known Avatica request type is handled above; anything else is a protocol error.
    throw new IAE("Received an unknown Avatica protobuf request");
  }

  /**
   * Proxy response listener that, in addition to forwarding the proxied response, updates the
   * success/failure counters, emits a "query/time" metric and writes a request-log line for the
   * query that was proxied.
   *
   * <p>Which identifiers are available depends on how the request arrived:
   * for JDBC/Avatica traffic ({@code isJDBC}) only the SQL query id is read from a response
   * header; for HTTP SQL ({@code sqlQuery != null}) the SQL query id and native query id are
   * read from the query context; for plain native queries ({@code query != null}) only the
   * native query id is known. Requests with neither id are passed through untouched.
   */
  private class MetricsEmittingProxyResponseListener<T> extends ProxyResponseListener
  {
    private final HttpServletRequest req;
    @Nullable
    private final Query<T> query;     // native query, when this was a native-query request
    @Nullable
    private final SqlQuery sqlQuery;  // SQL query, when this was an HTTP SQL request
    private final boolean isJDBC;     // true for JDBC (Avatica) traffic
    private final long startNs;       // nanoTime captured when proxying started; used for query/time

    public MetricsEmittingProxyResponseListener(
        HttpServletRequest request,
        HttpServletResponse response,
        @Nullable Query<T> query,
        @Nullable SqlQuery sqlQuery,
        boolean isJDBC,
        long startNs
    )
    {
      super(request, response);
      this.req = request;
      this.query = query;
      this.sqlQuery = sqlQuery;
      this.isJDBC = isJDBC;
      this.startNs = startNs;
    }

    @Override
    public void onComplete(Result result)
    {
      // Wall-clock time spent proxying this request, measured from construction time.
      final long requestTimeNs = System.nanoTime() - startNs;
      String queryId = null;
      String sqlQueryId = null;
      if (isJDBC) {
        // JDBC: the SQL query id is only available via the response header
        // (presumably set by the downstream server — confirm against SqlResource).
        sqlQueryId = result.getResponse().getHeaders().get(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER);
      } else if (sqlQuery != null) {
        sqlQueryId = (String) sqlQuery.getContext().getOrDefault(BaseQuery.SQL_QUERY_ID, null);
        queryId = (String) sqlQuery.getContext().getOrDefault(BaseQuery.QUERY_ID, null);
      } else if (query != null) {
        queryId = query.getId();
      }

      // not a native or SQL query, no need to emit metrics and logs
      if (queryId == null && sqlQueryId == null) {
        super.onComplete(result);
        return;
      }

      boolean success = result.isSucceeded();
      if (success) {
        successfulQueryCount.incrementAndGet();
      } else {
        failedQueryCount.incrementAndGet();
      }
      emitQueryTime(requestTimeNs, success, sqlQueryId, queryId);
      AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(req);

      //noinspection VariableNotUsedInsideIf
      if (sqlQueryId != null) {
        // SQL query doesn't have a native query translation in router. Hence, not logging the native query.
        if (sqlQuery != null) {
          try {
            requestLogger.logSqlQuery(
                RequestLogLine.forSql(
                    sqlQuery.getQuery(),
                    sqlQuery.getContext(),
                    DateTimes.nowUtc(),
                    req.getRemoteAddr(),
                    new QueryStats(
                        ImmutableMap.of(
                            "query/time", TimeUnit.NANOSECONDS.toMillis(requestTimeNs),
                            // "success" additionally requires an HTTP 200 from the downstream server.
                            "success", success && result.getResponse().getStatus() == Status.OK.getStatusCode(),
                            "identity", authenticationResult.getIdentity()
                        )
                    )
                )
            );
          } catch (IOException e) {
            LOG.error(e, "Unable to log SQL query [%s]!", sqlQuery);
          }
        }
        super.onComplete(result);
        return;
      }

      // Native query path: log the native query with the same stats.
      try {
        requestLogger.logNativeQuery(
            RequestLogLine.forNative(
                query,
                DateTimes.nowUtc(),
                req.getRemoteAddr(),
                new QueryStats(
                    ImmutableMap.of(
                        "query/time", TimeUnit.NANOSECONDS.toMillis(requestTimeNs),
                        "success", success && result.getResponse().getStatus() == Status.OK.getStatusCode(),
                        "identity", authenticationResult.getIdentity()
                    )
                )
            )
        );
      } catch (Exception e) {
        LOG.error(e, "Unable to log query [%s]!", query);
      }
      super.onComplete(result);
    }

    @Override
    public void onFailure(Response response, Throwable failure)
    {
      final long requestTimeNs = System.nanoTime() - startNs;
      final String errorMessage = failure.getMessage();
      String queryId = null;
      String sqlQueryId = null;
      // Same id-resolution rules as onComplete, but reading headers from the failed response.
      if (isJDBC) {
        sqlQueryId = response.getHeaders().get(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER);
      } else if (sqlQuery != null) {
        sqlQueryId = (String) sqlQuery.getContext().getOrDefault(BaseQuery.SQL_QUERY_ID, null);
        queryId = (String) sqlQuery.getContext().getOrDefault(BaseQuery.QUERY_ID, null);
      } else if (query != null) {
        queryId = query.getId();
      }

      // not a native or SQL query, no need to emit metrics and logs
      if (queryId == null && sqlQueryId == null) {
        super.onFailure(response, failure);
        return;
      }

      failedQueryCount.incrementAndGet();
      emitQueryTime(requestTimeNs, false, sqlQueryId, queryId);
      AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(req);

      //noinspection VariableNotUsedInsideIf
      if (sqlQueryId != null) {
        // SQL query doesn't have a native query translation in router. Hence, not logging the native query.
        if (sqlQuery != null) {
          try {
            requestLogger.logSqlQuery(
                RequestLogLine.forSql(
                    sqlQuery.getQuery(),
                    sqlQuery.getContext(),
                    DateTimes.nowUtc(),
                    req.getRemoteAddr(),
                    new QueryStats(
                        ImmutableMap.of(
                            "success", false,
                            "exception", errorMessage == null ? "no message" : errorMessage,
                            "identity", authenticationResult.getIdentity()
                        )
                    )
                )
            );
          } catch (IOException e) {
            LOG.error(e, "Unable to log SQL query [%s]!", sqlQuery);
          }
          // Raise an operator-visible alert in addition to the request log.
          LOG.makeAlert(failure, "Exception handling request")
             .addData("exception", failure.toString())
             .addData("sqlQuery", sqlQuery)
             .addData("peer", req.getRemoteAddr())
             .emit();
        }
        super.onFailure(response, failure);
        return;
      }

      try {
        requestLogger.logNativeQuery(
            RequestLogLine.forNative(
                query,
                DateTimes.nowUtc(),
                req.getRemoteAddr(),
                new QueryStats(
                    ImmutableMap.of(
                        "success", false,
                        "exception", errorMessage == null ? "no message" : errorMessage,
                        "identity", authenticationResult.getIdentity()
                    )
                )
            )
        );
      } catch (IOException logError) {
        LOG.error(logError, "Unable to log query [%s]!", query);
      }
      LOG.makeAlert(failure, "Exception handling request")
         .addData("exception", failure.toString())
         .addData("query", query)
         .addData("peer", req.getRemoteAddr())
         .emit();
      super.onFailure(response, failure);
    }

    /**
     * Emits the query-time metric for this request.
     *
     * <p>For SQL queries a bare metrics object is built and tagged with the SQL query id
     * (and the native query id when present); for native queries the full per-query request
     * metrics are built via {@code DruidMetrics.makeRequestMetrics}.
     */
    private void emitQueryTime(
        long requestTimeNs,
        boolean success,
        @Nullable String sqlQueryId,
        @Nullable String queryId
    )
    {
      QueryMetrics queryMetrics;
      if (sqlQueryId != null) {
        queryMetrics = queryMetricsFactory.makeMetrics();
        queryMetrics.remoteAddress(req.getRemoteAddr());
        // Setting sqlQueryId and queryId dimensions to the metric
        queryMetrics.sqlQueryId(sqlQueryId);
        if (queryId != null) {
          // query id is null for JDBC SQL
          queryMetrics.queryId(queryId);
        }
      } else {
        queryMetrics = DruidMetrics.makeRequestMetrics(
            queryMetricsFactory,
            warehouse.getToolChest(query),
            query,
            req.getRemoteAddr()
        );
      }
      queryMetrics.success(success);
      queryMetrics.reportQueryTime(requestTimeNs).emit(emitter);
    }
  }
}
apache/ignite-3
34,885
modules/sql-engine/src/test/java/org/apache/ignite/internal/sql/engine/planner/OutOfRangeLiteralsInComparisonReductionPlannerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.sql.engine.planner; import static org.junit.jupiter.params.provider.Arguments.arguments; import java.util.function.Predicate; import java.util.function.UnaryOperator; import java.util.stream.Stream; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.ignite.internal.sql.engine.framework.TestBuilders.TableBuilder; import org.apache.ignite.internal.sql.engine.rel.IgniteValues; import org.apache.ignite.internal.sql.engine.rel.ProjectableFilterableTableScan; import org.apache.ignite.internal.sql.engine.schema.IgniteSchema; import org.apache.ignite.internal.sql.engine.trait.IgniteDistributions; import org.apache.ignite.internal.sql.engine.type.IgniteTypeFactory; import org.apache.ignite.internal.type.NativeTypes; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; /** Test to validate behaviour of constant reduction in binary comparison operation. 
*/
public class OutOfRangeLiteralsInComparisonReductionPlannerTest extends AbstractPlannerTest {
    /**
     * Index bound checks - search key lies out of value range.
     */
    @ParameterizedTest
    @MethodSource("args")
    void testBoundsTypeLimits(RelDataType type, String expression, String expectedExpression) throws Exception {
        // Single-column table TEST(C1 INT32, C2 <type under test>).
        IgniteSchema schema = createSchemaFrom(table("TEST", "C2", type));

        Predicate<RelNode> matcher;
        // The sentinel string "emptyValuesNode" means the whole predicate must fold to an
        // empty VALUES node; any other value is matched against the scan's residual condition.
        if ("emptyValuesNode".equals(expectedExpression)) {
            matcher = emptyValuesNode();
        } else {
            matcher = expression(expectedExpression);
        }

        assertPlan("SELECT * FROM test WHERE C2 " + expression, schema, matcher);
    }

    /**
     * Test cases: for each column type, comparison/IN expressions with literals at and just
     * beyond the type's value range, paired with the condition the planner is expected to
     * reduce them to (or "emptyValuesNode" when the predicate is provably unsatisfiable).
     */
    private static Stream<Arguments> args() {
        return Stream.of(
                // Column type, expr to use in condition, expected expression.

                // TINYINT
                arguments(sqlType(SqlTypeName.TINYINT), "= -129", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "= -128.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "= -128", "=($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), "= -127.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "= 128", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "= 127.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "= 127", "=($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "= 126.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (-129, -128.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (-128.1, -127.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (-128.1, -128, -127.1)", "=($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (-128.1, -128, -127.1, -127)",
                        "SEARCH($t1, Sarg[-128.0:TINYINT, -127.0:TINYINT]:TINYINT)"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (128, 127.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (127.1, 126.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (127.1, 127, 126.1)", "=($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "IN (127.1, 127, 126.1, 126)",
                        "SEARCH($t1, Sarg[126.0:TINYINT, 127.0:TINYINT]:TINYINT)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= -129", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= -128.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= -128", ">=($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= -127.1", ">=($t1, -127)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= 128", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= 127.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= 127", ">=($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), ">= 126.1", ">($t1, 126)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> -129", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> -128.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> -128", ">($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> -127.1", ">=($t1, -127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> 128", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "> 127.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "> 127", ">($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "> 126.1", ">($t1, 126)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< -129", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "< -128.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "< -128", "<($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< -127.1", "<($t1, -127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< 128", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< 127.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< 127", "<($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "< 126.1", "<=($t1, 126)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= -129", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= -128.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= -128", "<=($t1, -128)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= -127.1", "<($t1, -127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= 128", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= 127.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= 127", "<=($t1, 127)"),
                arguments(sqlType(SqlTypeName.TINYINT), "<= 126.1", "<=($t1, 126)"),

                // SMALLINT
                arguments(sqlType(SqlTypeName.SMALLINT), "= -32769", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= -32768.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= -32768", "=($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= -32767.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= 32768", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= 32767.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= 32767", "=($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "= 32766.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (-32769, -32768.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (-32768.1, -32767.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (-32768.1, -32768, -32767.1)", "=($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (-32768.1, -32768, -32767.1, -32767)",
                        "SEARCH($t1, Sarg[-32768.0:SMALLINT, -32767.0:SMALLINT]:SMALLINT)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (32768, 32767.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (32767.1, 32766.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (32767.1, 32767, 32766.1)", "=($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "IN (32767.1, 32767, 32766.1, 32766)",
                        "SEARCH($t1, Sarg[32766.0:SMALLINT, 32767.0:SMALLINT]:SMALLINT)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= -32769", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= -32768.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= -32768", ">=($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= -32767.1", ">=($t1, -32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= 32768", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= 32767.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= 32767", ">=($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), ">= 32766.1", ">($t1, 32766)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> -32769", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> -32768.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> -32768", ">($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> -32767.1", ">=($t1, -32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> 32768", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> 32767.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> 32767", ">($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "> 32766.1", ">($t1, 32766)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< -32769", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< -32768.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< -32768", "<($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< -32767.1", "<($t1, -32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< 32768", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< 32767.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< 32767", "<($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "< 32766.1", "<=($t1, 32766)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= -32769", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= -32768.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= -32768", "<=($t1, -32768)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= -32767.1", "<($t1, -32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= 32768", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= 32767.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= 32767", "<=($t1, 32767)"),
                arguments(sqlType(SqlTypeName.SMALLINT), "<= 32766.1", "<=($t1, 32766)"),

                // INTEGER
                arguments(sqlType(SqlTypeName.INTEGER), "= -2147483649", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "= -2147483648.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "= -2147483648", "=($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), "= -2147483647.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "= 2147483648", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "= 2147483647.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "= 2147483647", "=($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "= 2147483646.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (-2147483649, -2147483648.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (-2147483648.1, -2147483647.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (-2147483648.1, -2147483648, -2147483647.1)",
                        "=($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (-2147483648.1, -2147483648, -2147483647.1, -2147483647)",
                        "SEARCH($t1, Sarg[-2147483648.0, -2147483647.0])"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (2147483648, 2147483647.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (2147483647.1, 2147483646.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (2147483647.1, 2147483647, 2147483646.1)",
                        "=($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "IN (2147483647.1, 2147483647, 2147483646.1, 2147483646)",
                        "SEARCH($t1, Sarg[2147483646.0, 2147483647.0])"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= -2147483649", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= -2147483648.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= -2147483648", ">=($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= -2147483647.1", ">=($t1, -2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= 2147483648", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= 2147483647.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= 2147483647", ">=($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), ">= 2147483646.1", ">($t1, 2147483646)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> -2147483649", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> -2147483648.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> -2147483648", ">($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> -2147483647.1", ">=($t1, -2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> 2147483648", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "> 2147483647.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "> 2147483647", ">($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "> 2147483646.1", ">($t1, 2147483646)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< -2147483649", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "< -2147483648.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "< -2147483648", "<($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< -2147483647.1", "<($t1, -2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< 2147483648", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< 2147483647.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< 2147483647", "<($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "< 2147483646.1", "<=($t1, 2147483646)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= -2147483649", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= -2147483648.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= -2147483648", "<=($t1, -2147483648)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= -2147483647.1", "<($t1, -2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= 2147483648", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= 2147483647.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= 2147483647", "<=($t1, 2147483647)"),
                arguments(sqlType(SqlTypeName.INTEGER), "<= 2147483646.1", "<=($t1, 2147483646)"),

                // BIGINT
                arguments(sqlType(SqlTypeName.BIGINT), "= -9223372036854775809", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "= -9223372036854775808.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "= -9223372036854775808", "=($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT), "= -9223372036854775807.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "= 9223372036854775808", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "= 9223372036854775807.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "= 9223372036854775807", "=($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "= 9223372036854775806.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "IN (-9223372036854775809, -9223372036854775808.1)",
                        "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "IN (-9223372036854775808.1, -9223372036854775807.1)",
                        "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT),
                        "IN (-9223372036854775808.1, -9223372036854775808, -9223372036854775807.1)",
                        "=($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT),
                        "IN (-9223372036854775808.1, -9223372036854775808, -9223372036854775807.1, -9223372036854775807)",
                        "SEARCH($t1, Sarg[-9223372036854775808L:BIGINT, -9223372036854775807L:BIGINT]:BIGINT)"),
                arguments(sqlType(SqlTypeName.BIGINT), "IN (9223372036854775808, 9223372036854775807.1)",
                        "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "IN (9223372036854775807.1, 9223372036854775806.1)",
                        "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT),
                        "IN (9223372036854775807.1, 9223372036854775807, 9223372036854775806.1)",
                        "=($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT),
                        "IN (9223372036854775807.1, 9223372036854775807, 9223372036854775806.1, 9223372036854775806)",
                        "SEARCH($t1, Sarg[9223372036854775806L:BIGINT, 9223372036854775807L:BIGINT]:BIGINT)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= -9223372036854775809", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= -9223372036854775808.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= -9223372036854775808", ">=($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= -9223372036854775807.1", ">=($t1, -9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= 9223372036854775808", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= 9223372036854775807.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= 9223372036854775807", ">=($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), ">= 9223372036854775806.1", ">($t1, 9223372036854775806)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> -9223372036854775809", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> -9223372036854775808.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> -9223372036854775808", ">($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> -9223372036854775807.1", ">=($t1, -9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> 9223372036854775808", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "> 9223372036854775807.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "> 9223372036854775807", ">($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "> 9223372036854775806.1", ">($t1, 9223372036854775806)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< -9223372036854775809", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "< -9223372036854775808.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "< -9223372036854775808", "<($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< -9223372036854775807.1", "<($t1, -9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< 9223372036854775808", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< 9223372036854775807.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< 9223372036854775807", "<($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "< 9223372036854775806.1", "<=($t1, 9223372036854775806)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= -9223372036854775809", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= -9223372036854775808.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= -9223372036854775808", "<=($t1, -9223372036854775808)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= -9223372036854775807.1", "<($t1, -9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= 9223372036854775808", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= 9223372036854775807.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= 9223372036854775807", "<=($t1, 9223372036854775807)"),
                arguments(sqlType(SqlTypeName.BIGINT), "<= 9223372036854775806.1", "<=($t1, 9223372036854775806)"),

                // DECIMAL(1,0)
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= -9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= -9.0", "=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= -9", "=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= -8.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= 9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= 9.0", "=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= 9", "=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "= 8.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (-10, -9.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (-9.1, -8.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (-9.1, -9, -8.1)", "=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (-9.1, -9, -8.1, -8)",
                        "SEARCH($t1, Sarg[-9.0:DECIMAL(1, 0), -8.0:DECIMAL(1, 0)]:DECIMAL(1, 0))"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (10, 9.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (9.1, 8.1)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (9.1, 9, 8.1)", "=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "IN (9.1, 9, 8.1, 8)",
                        "SEARCH($t1, Sarg[8.0:DECIMAL(1, 0), 9.0:DECIMAL(1, 0)]:DECIMAL(1, 0))"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= -10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= -9.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= -9.0", ">=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= -9", ">=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= -8.1", ">=($t1, -8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= 9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= 9.0", ">=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= 9", ">=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), ">= 8.1", ">($t1, 8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> -10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> -9.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> -9.0", ">($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> -9", ">($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> -8.1", ">=($t1, -8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> 9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> 9.0", ">($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> 9", ">($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "> 8.1", ">($t1, 8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< -9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< -9.0", "<($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< -9", "<($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< -8.1", "<($t1, -8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< 10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< 9.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< 9.0", "<($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< 9", "<($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "< 8.1", "<=($t1, 8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= -9.1", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= -9.0", "<=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= -9", "<=($t1, -9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= -8.1", "<($t1, -8)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= 10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= 9.1", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= 9.0", "<=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= 9", "<=($t1, 9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 1), "<= 8.1", "<=($t1, 8)"),

                // DECIMAL(2,1)
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= -9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= -9.90", "=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= -9.9", "=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= -8.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= 9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= 9.90", "=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= 9.9", "=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "= 8.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (-10, -9.91)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (-9.91, -8.91)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (-9.91, -9.9, -8.91)", "=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (-9.91, -9.9, -8.91, -8.9)",
                        "SEARCH($t1, Sarg[-9.90:DECIMAL(2, 1), -8.90:DECIMAL(2, 1)]:DECIMAL(2, 1))"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (10, 9.91)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (9.91, 8.91)", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (9.91, 9.9, 8.91)", "=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "IN (9.91, 9.9, 8.91, 8.9)",
                        "SEARCH($t1, Sarg[8.90:DECIMAL(2, 1), 9.90:DECIMAL(2, 1)]:DECIMAL(2, 1))"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= -10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= -9.91", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= -9.90", ">=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= -9.9", ">=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= -8.91", ">=($t1, -8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= 9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= 9.90", ">=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= 9.9", ">=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), ">= 8.91", ">($t1, 8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> -10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> -9.91", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> -9.90", ">($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> -9.9", ">($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> -8.91", ">=($t1, -8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> 10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> 9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> 9.90", ">($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> 9.9", ">($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "> 8.91", ">($t1, 8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< -9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< -9.90", "<($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< -9.9", "<($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< -8.91", "<($t1, -8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< 10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< 9.91", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< 9.90", "<($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< 9.9", "<($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "< 8.91", "<=($t1, 8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= -10", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= -9.91", "emptyValuesNode"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= -9.90", "<=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= -9.9", "<=($t1, -9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= -8.91", "<($t1, -8.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= 10", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= 9.91", "IS NOT NULL($t1)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= 9.90", "<=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= 9.9", "<=($t1, 9.9)"),
                arguments(sqlType(SqlTypeName.DECIMAL, 2, 1), "<= 8.91", "<=($t1, 8.9)")
        );
    }

    /** Creates a SQL type with default precision/scale. */
    private static RelDataType sqlType(SqlTypeName typeName) {
        return TYPE_FACTORY.createSqlType(typeName);
    }

    /** Creates a SQL type with the given precision. */
    private static RelDataType sqlType(SqlTypeName typeName, int precision) {
        return TYPE_FACTORY.createSqlType(typeName, precision);
    }

    /** Creates a SQL type with the given precision and scale. */
    private static RelDataType sqlType(SqlTypeName typeName, int precision, int scale) {
        return TYPE_FACTORY.createSqlType(typeName, precision, scale);
    }

    /** Matches a plan that is a VALUES node with no tuples, i.e. a provably empty result. */
    private static Predicate<RelNode> emptyValuesNode() {
        return byClass(IgniteValues.class).and(node -> ((IgniteValues) node).tuples.isEmpty());
    }

    /** Matches a table scan whose filter condition prints exactly as {@code expected}. */
    private static Predicate<RelNode> expression(String expected) {
        return byClass(ProjectableFilterableTableScan.class)
                .and(node -> {
                    var tableScan = (ProjectableFilterableTableScan) node;

                    RexNode condition = tableScan.condition();

                    return condition != null && condition.toString().equals(expected);
                });
    }

    /**
     * Builds a single-distribution test table with an INT32 column "C1" and one extra column
     * of the requested type.
     */
    private static UnaryOperator<TableBuilder> table(String tableName, String column, RelDataType type) {
        return tableBuilder -> tableBuilder
                .name(tableName)
                .addColumn("C1", NativeTypes.INT32)
                .addColumn(column, IgniteTypeFactory.relDataTypeToNative(type))
                .size(400)
                .distribution(IgniteDistributions.single());
    }
}
googleapis/google-cloud-java
35,532
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/CreateContextRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/context.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * The request message for * [Contexts.CreateContext][google.cloud.dialogflow.v2beta1.Contexts.CreateContext]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.CreateContextRequest} */ public final class CreateContextRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.CreateContextRequest) CreateContextRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateContextRequest.newBuilder() to construct. 
private CreateContextRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateContextRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateContextRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ContextProto .internal_static_google_cloud_dialogflow_v2beta1_CreateContextRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ContextProto .internal_static_google_cloud_dialogflow_v2beta1_CreateContextRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.CreateContextRequest.class, com.google.cloud.dialogflow.v2beta1.CreateContextRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONTEXT_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.v2beta1.Context context_; /** * * * <pre> * Required. The context to create. 
* </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the context field is set. */ @java.lang.Override public boolean hasContext() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The context. */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.Context getContext() { return context_ == null ? com.google.cloud.dialogflow.v2beta1.Context.getDefaultInstance() : context_; } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ContextOrBuilder getContextOrBuilder() { return context_ == null ? com.google.cloud.dialogflow.v2beta1.Context.getDefaultInstance() : context_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getContext()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 
0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getContext()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.CreateContextRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2beta1.CreateContextRequest other = (com.google.cloud.dialogflow.v2beta1.CreateContextRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasContext() != other.hasContext()) return false; if (hasContext()) { if (!getContext().equals(other.getContext())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasContext()) { hash = (37 * hash) + CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getContext().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2beta1.CreateContextRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [Contexts.CreateContext][google.cloud.dialogflow.v2beta1.Contexts.CreateContext]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.CreateContextRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.CreateContextRequest) com.google.cloud.dialogflow.v2beta1.CreateContextRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ContextProto .internal_static_google_cloud_dialogflow_v2beta1_CreateContextRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ContextProto .internal_static_google_cloud_dialogflow_v2beta1_CreateContextRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.CreateContextRequest.class, com.google.cloud.dialogflow.v2beta1.CreateContextRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2beta1.CreateContextRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getContextFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; context_ = null; if (contextBuilder_ != null) { contextBuilder_.dispose(); contextBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.ContextProto .internal_static_google_cloud_dialogflow_v2beta1_CreateContextRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.CreateContextRequest 
getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2beta1.CreateContextRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.CreateContextRequest build() { com.google.cloud.dialogflow.v2beta1.CreateContextRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.CreateContextRequest buildPartial() { com.google.cloud.dialogflow.v2beta1.CreateContextRequest result = new com.google.cloud.dialogflow.v2beta1.CreateContextRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.v2beta1.CreateContextRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.context_ = contextBuilder_ == null ? 
context_ : contextBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2beta1.CreateContextRequest) { return mergeFrom((com.google.cloud.dialogflow.v2beta1.CreateContextRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.CreateContextRequest other) { if (other == com.google.cloud.dialogflow.v2beta1.CreateContextRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasContext()) { mergeContext(other.getContext()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getContextFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The session to create a context for. 
Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The session to create a context for. Supported formats: * - `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;, * - `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/sessions/&lt;Session * ID&gt;`, * - `projects/&lt;Project ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User * ID&gt;/sessions/&lt;Session ID&gt;`, * - `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/agent/environments/&lt;Environment ID&gt;/users/&lt;User ID&gt;/sessions/&lt;Session * ID&gt;`, * * If `Location ID` is not specified we assume default 'us' location. If * `Environment ID` is not specified, we assume default 'draft' environment. * If `User ID` is not specified, we assume default '-' user. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dialogflow.v2beta1.Context context_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.Context, com.google.cloud.dialogflow.v2beta1.Context.Builder, com.google.cloud.dialogflow.v2beta1.ContextOrBuilder> contextBuilder_; /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the context field is set. 
*/ public boolean hasContext() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The context. */ public com.google.cloud.dialogflow.v2beta1.Context getContext() { if (contextBuilder_ == null) { return context_ == null ? com.google.cloud.dialogflow.v2beta1.Context.getDefaultInstance() : context_; } else { return contextBuilder_.getMessage(); } } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setContext(com.google.cloud.dialogflow.v2beta1.Context value) { if (contextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } context_ = value; } else { contextBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setContext(com.google.cloud.dialogflow.v2beta1.Context.Builder builderForValue) { if (contextBuilder_ == null) { context_ = builderForValue.build(); } else { contextBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The context to create. 
* </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeContext(com.google.cloud.dialogflow.v2beta1.Context value) { if (contextBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && context_ != null && context_ != com.google.cloud.dialogflow.v2beta1.Context.getDefaultInstance()) { getContextBuilder().mergeFrom(value); } else { context_ = value; } } else { contextBuilder_.mergeFrom(value); } if (context_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearContext() { bitField0_ = (bitField0_ & ~0x00000002); context_ = null; if (contextBuilder_ != null) { contextBuilder_.dispose(); contextBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2beta1.Context.Builder getContextBuilder() { bitField0_ |= 0x00000002; onChanged(); return getContextFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The context to create. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2beta1.ContextOrBuilder getContextOrBuilder() { if (contextBuilder_ != null) { return contextBuilder_.getMessageOrBuilder(); } else { return context_ == null ? com.google.cloud.dialogflow.v2beta1.Context.getDefaultInstance() : context_; } } /** * * * <pre> * Required. The context to create. 
* </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Context context = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.Context, com.google.cloud.dialogflow.v2beta1.Context.Builder, com.google.cloud.dialogflow.v2beta1.ContextOrBuilder> getContextFieldBuilder() { if (contextBuilder_ == null) { contextBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.Context, com.google.cloud.dialogflow.v2beta1.Context.Builder, com.google.cloud.dialogflow.v2beta1.ContextOrBuilder>( getContext(), getParentForChildren(), isClean()); context_ = null; } return contextBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.CreateContextRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.CreateContextRequest) private static final com.google.cloud.dialogflow.v2beta1.CreateContextRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.CreateContextRequest(); } public static com.google.cloud.dialogflow.v2beta1.CreateContextRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateContextRequest> PARSER = new com.google.protobuf.AbstractParser<CreateContextRequest>() { @java.lang.Override public CreateContextRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); 
} catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateContextRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateContextRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.CreateContextRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,800
java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPredictionServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1.stub; import static com.google.cloud.aiplatform.v1.PredictionServiceClient.ListLocationsPagedResponse; import com.google.api.HttpBody; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.aiplatform.v1.DirectPredictRequest; import com.google.cloud.aiplatform.v1.DirectPredictResponse; import com.google.cloud.aiplatform.v1.DirectRawPredictRequest; import com.google.cloud.aiplatform.v1.DirectRawPredictResponse; import com.google.cloud.aiplatform.v1.ExplainRequest; import com.google.cloud.aiplatform.v1.ExplainResponse; import com.google.cloud.aiplatform.v1.GenerateContentRequest; import com.google.cloud.aiplatform.v1.GenerateContentResponse; import com.google.cloud.aiplatform.v1.PredictRequest; import com.google.cloud.aiplatform.v1.PredictResponse; import com.google.cloud.aiplatform.v1.RawPredictRequest; import com.google.cloud.aiplatform.v1.StreamDirectPredictRequest; import com.google.cloud.aiplatform.v1.StreamDirectPredictResponse; 
import com.google.cloud.aiplatform.v1.StreamDirectRawPredictRequest; import com.google.cloud.aiplatform.v1.StreamDirectRawPredictResponse; import com.google.cloud.aiplatform.v1.StreamRawPredictRequest; import com.google.cloud.aiplatform.v1.StreamingPredictRequest; import com.google.cloud.aiplatform.v1.StreamingPredictResponse; import com.google.cloud.aiplatform.v1.StreamingRawPredictRequest; import com.google.cloud.aiplatform.v1.StreamingRawPredictResponse; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the PredictionService service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
 */
@Generated("by gapic-generator-java")
public class GrpcPredictionServiceStub extends PredictionServiceStub {

  // ---------------------------------------------------------------------------
  // Static gRPC method descriptors.
  //
  // Each descriptor captures the fully-qualified RPC name, the call type
  // (UNARY / SERVER_STREAMING / BIDI_STREAMING) and the protobuf marshallers
  // for the request/response messages. They are immutable and shared by every
  // stub instance. setSampledToLocalTracing(true) opts each method into local
  // (in-process) trace sampling.
  // ---------------------------------------------------------------------------

  private static final MethodDescriptor<PredictRequest, PredictResponse> predictMethodDescriptor =
      MethodDescriptor.<PredictRequest, PredictResponse>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/Predict")
          .setRequestMarshaller(ProtoUtils.marshaller(PredictRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(PredictResponse.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  // RawPredict returns an arbitrary HTTP payload (google.api.HttpBody) rather
  // than a typed response message.
  private static final MethodDescriptor<RawPredictRequest, HttpBody> rawPredictMethodDescriptor =
      MethodDescriptor.<RawPredictRequest, HttpBody>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/RawPredict")
          .setRequestMarshaller(ProtoUtils.marshaller(RawPredictRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(HttpBody.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<StreamRawPredictRequest, HttpBody>
      streamRawPredictMethodDescriptor =
          MethodDescriptor.<StreamRawPredictRequest, HttpBody>newBuilder()
              .setType(MethodDescriptor.MethodType.SERVER_STREAMING)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/StreamRawPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamRawPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(HttpBody.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DirectPredictRequest, DirectPredictResponse>
      directPredictMethodDescriptor =
          MethodDescriptor.<DirectPredictRequest, DirectPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/DirectPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DirectPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(DirectPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DirectRawPredictRequest, DirectRawPredictResponse>
      directRawPredictMethodDescriptor =
          MethodDescriptor.<DirectRawPredictRequest, DirectRawPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/DirectRawPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DirectRawPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(DirectRawPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<StreamDirectPredictRequest, StreamDirectPredictResponse>
      streamDirectPredictMethodDescriptor =
          MethodDescriptor.<StreamDirectPredictRequest, StreamDirectPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/StreamDirectPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamDirectPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamDirectPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<
          StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>
      streamDirectRawPredictMethodDescriptor =
          MethodDescriptor
              .<StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName(
                  "google.cloud.aiplatform.v1.PredictionService/StreamDirectRawPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamDirectRawPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamDirectRawPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<StreamingPredictRequest, StreamingPredictResponse>
      streamingPredictMethodDescriptor =
          MethodDescriptor.<StreamingPredictRequest, StreamingPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/StreamingPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamingPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamingPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // ServerStreamingPredict reuses the StreamingPredict request/response
  // messages but is exposed as a server-streaming (not bidi) call.
  private static final MethodDescriptor<StreamingPredictRequest, StreamingPredictResponse>
      serverStreamingPredictMethodDescriptor =
          MethodDescriptor.<StreamingPredictRequest, StreamingPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.SERVER_STREAMING)
              .setFullMethodName(
                  "google.cloud.aiplatform.v1.PredictionService/ServerStreamingPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamingPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamingPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<StreamingRawPredictRequest, StreamingRawPredictResponse>
      streamingRawPredictMethodDescriptor =
          MethodDescriptor.<StreamingRawPredictRequest, StreamingRawPredictResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/StreamingRawPredict")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamingRawPredictRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamingRawPredictResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ExplainRequest, ExplainResponse> explainMethodDescriptor =
      MethodDescriptor.<ExplainRequest, ExplainResponse>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/Explain")
          .setRequestMarshaller(ProtoUtils.marshaller(ExplainRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(ExplainResponse.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<GenerateContentRequest, GenerateContentResponse>
      generateContentMethodDescriptor =
          MethodDescriptor.<GenerateContentRequest, GenerateContentResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.aiplatform.v1.PredictionService/GenerateContent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GenerateContentRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(GenerateContentResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // StreamGenerateContent shares GenerateContent's messages but streams the
  // responses back.
  private static final MethodDescriptor<GenerateContentRequest, GenerateContentResponse>
      streamGenerateContentMethodDescriptor =
          MethodDescriptor.<GenerateContentRequest, GenerateContentResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.SERVER_STREAMING)
              .setFullMethodName(
                  "google.cloud.aiplatform.v1.PredictionService/StreamGenerateContent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GenerateContentRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(GenerateContentResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // Descriptors below are for the mixin services (Locations, IAMPolicy) that
  // this stub also serves.
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
      MethodDescriptor.<GetLocationRequest, Location>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.location.Locations/GetLocation")
          .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
      MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
      MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // ---------------------------------------------------------------------------
  // Per-instance callables: one per RPC, wired up in the constructor from the
  // transport settings plus the retry/timeout configuration in
  // PredictionServiceStubSettings.
  // ---------------------------------------------------------------------------
  private final UnaryCallable<PredictRequest, PredictResponse> predictCallable;
  private final UnaryCallable<RawPredictRequest, HttpBody> rawPredictCallable;
  private final ServerStreamingCallable<StreamRawPredictRequest, HttpBody> streamRawPredictCallable;
  private final UnaryCallable<DirectPredictRequest, DirectPredictResponse> directPredictCallable;
  private final UnaryCallable<DirectRawPredictRequest, DirectRawPredictResponse>
      directRawPredictCallable;
  private final BidiStreamingCallable<StreamDirectPredictRequest, StreamDirectPredictResponse>
      streamDirectPredictCallable;
  private final BidiStreamingCallable<StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>
      streamDirectRawPredictCallable;
  private final BidiStreamingCallable<StreamingPredictRequest, StreamingPredictResponse>
      streamingPredictCallable;
  private final ServerStreamingCallable<StreamingPredictRequest, StreamingPredictResponse>
      serverStreamingPredictCallable;
  private final BidiStreamingCallable<StreamingRawPredictRequest, StreamingRawPredictResponse>
      streamingRawPredictCallable;
  private final UnaryCallable<ExplainRequest, ExplainResponse> explainCallable;
  private final UnaryCallable<GenerateContentRequest, GenerateContentResponse>
      generateContentCallable;
  private final ServerStreamingCallable<GenerateContentRequest, GenerateContentResponse>
      streamGenerateContentCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;

  // Aggregates every closeable resource owned by this stub so close()/shutdown()
  // can tear them down together.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  // Factory methods: each create(...) overload delegates to the constructors
  // below, filling in default settings and/or a default callable factory.
  public static final GrpcPredictionServiceStub create(PredictionServiceStubSettings settings)
      throws IOException {
    return new GrpcPredictionServiceStub(settings, ClientContext.create(settings));
  }

  public static final GrpcPredictionServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcPredictionServiceStub(
        PredictionServiceStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcPredictionServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcPredictionServiceStub(
        PredictionServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcPredictionServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcPredictionServiceStub(
      PredictionServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new GrpcPredictionServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcPredictionServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcPredictionServiceStub(
      PredictionServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Transport settings pair each method descriptor with a params extractor.
    // The extractor copies a request field (e.g. "endpoint") into the request
    // params, which gax uses for the x-goog-request-params routing header.
    // Note: the bidi-streaming methods (StreamDirectPredict,
    // StreamDirectRawPredict, StreamingPredict, StreamingRawPredict) have no
    // extractor — presumably because routing params cannot be taken from a
    // single request on a client-streaming call (generator behavior; not
    // established by this file).
    GrpcCallSettings<PredictRequest, PredictResponse> predictTransportSettings =
        GrpcCallSettings.<PredictRequest, PredictResponse>newBuilder()
            .setMethodDescriptor(predictMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("endpoint", String.valueOf(request.getEndpoint()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<RawPredictRequest, HttpBody> rawPredictTransportSettings =
        GrpcCallSettings.<RawPredictRequest, HttpBody>newBuilder()
            .setMethodDescriptor(rawPredictMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("endpoint", String.valueOf(request.getEndpoint()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<StreamRawPredictRequest, HttpBody> streamRawPredictTransportSettings =
        GrpcCallSettings.<StreamRawPredictRequest, HttpBody>newBuilder()
            .setMethodDescriptor(streamRawPredictMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("endpoint", String.valueOf(request.getEndpoint()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DirectPredictRequest, DirectPredictResponse> directPredictTransportSettings =
        GrpcCallSettings.<DirectPredictRequest, DirectPredictResponse>newBuilder()
            .setMethodDescriptor(directPredictMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("endpoint", String.valueOf(request.getEndpoint()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DirectRawPredictRequest, DirectRawPredictResponse>
        directRawPredictTransportSettings =
            GrpcCallSettings.<DirectRawPredictRequest, DirectRawPredictResponse>newBuilder()
                .setMethodDescriptor(directRawPredictMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("endpoint", String.valueOf(request.getEndpoint()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<StreamDirectPredictRequest, StreamDirectPredictResponse>
        streamDirectPredictTransportSettings =
            GrpcCallSettings.<StreamDirectPredictRequest, StreamDirectPredictResponse>newBuilder()
                .setMethodDescriptor(streamDirectPredictMethodDescriptor)
                .build();
    GrpcCallSettings<StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>
        streamDirectRawPredictTransportSettings =
            GrpcCallSettings
                .<StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>newBuilder()
                .setMethodDescriptor(streamDirectRawPredictMethodDescriptor)
                .build();
    GrpcCallSettings<StreamingPredictRequest, StreamingPredictResponse>
        streamingPredictTransportSettings =
            GrpcCallSettings.<StreamingPredictRequest, StreamingPredictResponse>newBuilder()
                .setMethodDescriptor(streamingPredictMethodDescriptor)
                .build();
    GrpcCallSettings<StreamingPredictRequest, StreamingPredictResponse>
        serverStreamingPredictTransportSettings =
            GrpcCallSettings.<StreamingPredictRequest, StreamingPredictResponse>newBuilder()
                .setMethodDescriptor(serverStreamingPredictMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("endpoint", String.valueOf(request.getEndpoint()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<StreamingRawPredictRequest, StreamingRawPredictResponse>
        streamingRawPredictTransportSettings =
            GrpcCallSettings.<StreamingRawPredictRequest, StreamingRawPredictResponse>newBuilder()
                .setMethodDescriptor(streamingRawPredictMethodDescriptor)
                .build();
    GrpcCallSettings<ExplainRequest, ExplainResponse> explainTransportSettings =
        GrpcCallSettings.<ExplainRequest, ExplainResponse>newBuilder()
            .setMethodDescriptor(explainMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("endpoint", String.valueOf(request.getEndpoint()));
                  return builder.build();
                })
            .build();
    // GenerateContent RPCs route on "model" rather than "endpoint".
    GrpcCallSettings<GenerateContentRequest, GenerateContentResponse>
        generateContentTransportSettings =
            GrpcCallSettings.<GenerateContentRequest, GenerateContentResponse>newBuilder()
                .setMethodDescriptor(generateContentMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("model", String.valueOf(request.getModel()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GenerateContentRequest, GenerateContentResponse>
        streamGenerateContentTransportSettings =
            GrpcCallSettings.<GenerateContentRequest, GenerateContentResponse>newBuilder()
                .setMethodDescriptor(streamGenerateContentMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("model", String.valueOf(request.getModel()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();

    // Combine transport settings with the per-method retry/timeout settings to
    // produce the final callables. Note listLocationsCallable and
    // listLocationsPagedCallable share the same transport settings.
    this.predictCallable =
        callableFactory.createUnaryCallable(
            predictTransportSettings, settings.predictSettings(), clientContext);
    this.rawPredictCallable =
        callableFactory.createUnaryCallable(
            rawPredictTransportSettings, settings.rawPredictSettings(), clientContext);
    this.streamRawPredictCallable =
        callableFactory.createServerStreamingCallable(
            streamRawPredictTransportSettings, settings.streamRawPredictSettings(), clientContext);
    this.directPredictCallable =
        callableFactory.createUnaryCallable(
            directPredictTransportSettings, settings.directPredictSettings(), clientContext);
    this.directRawPredictCallable =
        callableFactory.createUnaryCallable(
            directRawPredictTransportSettings, settings.directRawPredictSettings(), clientContext);
    this.streamDirectPredictCallable =
        callableFactory.createBidiStreamingCallable(
            streamDirectPredictTransportSettings,
            settings.streamDirectPredictSettings(),
            clientContext);
    this.streamDirectRawPredictCallable =
        callableFactory.createBidiStreamingCallable(
            streamDirectRawPredictTransportSettings,
            settings.streamDirectRawPredictSettings(),
            clientContext);
    this.streamingPredictCallable =
        callableFactory.createBidiStreamingCallable(
            streamingPredictTransportSettings, settings.streamingPredictSettings(), clientContext);
    this.serverStreamingPredictCallable =
        callableFactory.createServerStreamingCallable(
            serverStreamingPredictTransportSettings,
            settings.serverStreamingPredictSettings(),
            clientContext);
    this.streamingRawPredictCallable =
        callableFactory.createBidiStreamingCallable(
            streamingRawPredictTransportSettings,
            settings.streamingRawPredictSettings(),
            clientContext);
    this.explainCallable =
        callableFactory.createUnaryCallable(
            explainTransportSettings, settings.explainSettings(), clientContext);
    this.generateContentCallable =
        callableFactory.createUnaryCallable(
            generateContentTransportSettings, settings.generateContentSettings(), clientContext);
    this.streamGenerateContentCallable =
        callableFactory.createServerStreamingCallable(
            streamGenerateContentTransportSettings,
            settings.streamGenerateContentSettings(),
            clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the stub used for long-running operations. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  // ---------------------------------------------------------------------------
  // Callable accessors — simple getters for the callables built above.
  // ---------------------------------------------------------------------------

  @Override
  public UnaryCallable<PredictRequest, PredictResponse> predictCallable() {
    return predictCallable;
  }

  @Override
  public UnaryCallable<RawPredictRequest, HttpBody> rawPredictCallable() {
    return rawPredictCallable;
  }

  @Override
  public ServerStreamingCallable<StreamRawPredictRequest, HttpBody> streamRawPredictCallable() {
    return streamRawPredictCallable;
  }

  @Override
  public UnaryCallable<DirectPredictRequest, DirectPredictResponse> directPredictCallable() {
    return directPredictCallable;
  }

  @Override
  public UnaryCallable<DirectRawPredictRequest, DirectRawPredictResponse>
      directRawPredictCallable() {
    return directRawPredictCallable;
  }

  @Override
  public BidiStreamingCallable<StreamDirectPredictRequest, StreamDirectPredictResponse>
      streamDirectPredictCallable() {
    return streamDirectPredictCallable;
  }

  @Override
  public BidiStreamingCallable<StreamDirectRawPredictRequest, StreamDirectRawPredictResponse>
      streamDirectRawPredictCallable() {
    return streamDirectRawPredictCallable;
  }

  @Override
  public BidiStreamingCallable<StreamingPredictRequest, StreamingPredictResponse>
      streamingPredictCallable() {
    return streamingPredictCallable;
  }

  @Override
  public ServerStreamingCallable<StreamingPredictRequest, StreamingPredictResponse>
      serverStreamingPredictCallable() {
    return serverStreamingPredictCallable;
  }

  @Override
  public BidiStreamingCallable<StreamingRawPredictRequest, StreamingRawPredictResponse>
      streamingRawPredictCallable() {
    return streamingRawPredictCallable;
  }

  @Override
  public UnaryCallable<ExplainRequest, ExplainResponse> explainCallable() {
    return explainCallable;
  }

  @Override
  public UnaryCallable<GenerateContentRequest, GenerateContentResponse> generateContentCallable() {
    return generateContentCallable;
  }

  @Override
  public ServerStreamingCallable<GenerateContentRequest, GenerateContentResponse>
      streamGenerateContentCallable() {
    return streamGenerateContentCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }

  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }

  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }

  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }

  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }

  // ---------------------------------------------------------------------------
  // Lifecycle: all shutdown-related calls delegate to the aggregated
  // background resources (channels, executors, and the operations stub).
  // ---------------------------------------------------------------------------

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      // Checked exceptions from resource teardown are wrapped so close()
      // keeps an unchecked signature.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
35,503
java-enterpriseknowledgegraph/proto-google-cloud-enterpriseknowledgegraph-v1/src/main/java/com/google/cloud/enterpriseknowledgegraph/v1/CreateEntityReconciliationJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/enterpriseknowledgegraph/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.enterpriseknowledgegraph.v1; /** * * * <pre> * Request message for CreateEntityReconciliationJob. * </pre> * * Protobuf type {@code * google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest} */ public final class CreateEntityReconciliationJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) CreateEntityReconciliationJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateEntityReconciliationJobRequest.newBuilder() to construct. 
private CreateEntityReconciliationJobRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateEntityReconciliationJobRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateEntityReconciliationJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.enterpriseknowledgegraph.v1.ServiceProto .internal_static_google_cloud_enterpriseknowledgegraph_v1_CreateEntityReconciliationJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.enterpriseknowledgegraph.v1.ServiceProto .internal_static_google_cloud_enterpriseknowledgegraph_v1_CreateEntityReconciliationJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest.class, com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest .Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENTITY_RECONCILIATION_JOB_FIELD_NUMBER = 2; private com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entityReconciliationJob_; /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entityReconciliationJob field is set. */ @java.lang.Override public boolean hasEntityReconciliationJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entityReconciliationJob. */ @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob getEntityReconciliationJob() { return entityReconciliationJob_ == null ? com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.getDefaultInstance() : entityReconciliationJob_; } /** * * * <pre> * Required. The EntityReconciliationJob to create. 
* </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJobOrBuilder getEntityReconciliationJobOrBuilder() { return entityReconciliationJob_ == null ? com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.getDefaultInstance() : entityReconciliationJob_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getEntityReconciliationJob()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEntityReconciliationJob()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest)) { return super.equals(obj); } com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest other = 
(com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasEntityReconciliationJob() != other.hasEntityReconciliationJob()) return false; if (hasEntityReconciliationJob()) { if (!getEntityReconciliationJob().equals(other.getEntityReconciliationJob())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasEntityReconciliationJob()) { hash = (37 * hash) + ENTITY_RECONCILIATION_JOB_FIELD_NUMBER; hash = (53 * hash) + getEntityReconciliationJob().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 
} public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for CreateEntityReconciliationJob. 
* </pre> * * Protobuf type {@code * google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.enterpriseknowledgegraph.v1.ServiceProto .internal_static_google_cloud_enterpriseknowledgegraph_v1_CreateEntityReconciliationJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.enterpriseknowledgegraph.v1.ServiceProto .internal_static_google_cloud_enterpriseknowledgegraph_v1_CreateEntityReconciliationJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest .class, com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest .Builder.class); } // Construct using // com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntityReconciliationJobFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; entityReconciliationJob_ = null; if (entityReconciliationJobBuilder_ != null) { entityReconciliationJobBuilder_.dispose(); entityReconciliationJobBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.enterpriseknowledgegraph.v1.ServiceProto .internal_static_google_cloud_enterpriseknowledgegraph_v1_CreateEntityReconciliationJobRequest_descriptor; } @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest getDefaultInstanceForType() { return com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest build() { com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest buildPartial() { com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest result = new com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest( this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.entityReconciliationJob_ = entityReconciliationJobBuilder_ == null ? 
entityReconciliationJob_ : entityReconciliationJobBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) { return mergeFrom( (com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest other) { if (other == com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasEntityReconciliationJob()) { mergeEntityReconciliationJob(other.getEntityReconciliationJob()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getEntityReconciliationJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to create the * EntityReconciliationJob in. Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entityReconciliationJob_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.Builder, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJobOrBuilder> entityReconciliationJobBuilder_; /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entityReconciliationJob field is set. */ public boolean hasEntityReconciliationJob() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entityReconciliationJob. */ public com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob getEntityReconciliationJob() { if (entityReconciliationJobBuilder_ == null) { return entityReconciliationJob_ == null ? com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob .getDefaultInstance() : entityReconciliationJob_; } else { return entityReconciliationJobBuilder_.getMessage(); } } /** * * * <pre> * Required. The EntityReconciliationJob to create. 
* </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntityReconciliationJob( com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob value) { if (entityReconciliationJobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entityReconciliationJob_ = value; } else { entityReconciliationJobBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntityReconciliationJob( com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.Builder builderForValue) { if (entityReconciliationJobBuilder_ == null) { entityReconciliationJob_ = builderForValue.build(); } else { entityReconciliationJobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The EntityReconciliationJob to create. 
* </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEntityReconciliationJob( com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob value) { if (entityReconciliationJobBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && entityReconciliationJob_ != null && entityReconciliationJob_ != com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob .getDefaultInstance()) { getEntityReconciliationJobBuilder().mergeFrom(value); } else { entityReconciliationJob_ = value; } } else { entityReconciliationJobBuilder_.mergeFrom(value); } if (entityReconciliationJob_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEntityReconciliationJob() { bitField0_ = (bitField0_ & ~0x00000002); entityReconciliationJob_ = null; if (entityReconciliationJobBuilder_ != null) { entityReconciliationJobBuilder_.dispose(); entityReconciliationJobBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.Builder getEntityReconciliationJobBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEntityReconciliationJobFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The EntityReconciliationJob to create. 
* </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJobOrBuilder getEntityReconciliationJobOrBuilder() { if (entityReconciliationJobBuilder_ != null) { return entityReconciliationJobBuilder_.getMessageOrBuilder(); } else { return entityReconciliationJob_ == null ? com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob .getDefaultInstance() : entityReconciliationJob_; } } /** * * * <pre> * Required. The EntityReconciliationJob to create. * </pre> * * <code> * .google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob entity_reconciliation_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.Builder, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJobOrBuilder> getEntityReconciliationJobFieldBuilder() { if (entityReconciliationJobBuilder_ == null) { entityReconciliationJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJob.Builder, com.google.cloud.enterpriseknowledgegraph.v1.EntityReconciliationJobOrBuilder>( getEntityReconciliationJob(), getParentForChildren(), isClean()); entityReconciliationJob_ = null; } return entityReconciliationJobBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest) private static final com.google.cloud.enterpriseknowledgegraph.v1 .CreateEntityReconciliationJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest(); } public static com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateEntityReconciliationJobRequest> PARSER = new com.google.protobuf.AbstractParser<CreateEntityReconciliationJobRequest>() { @java.lang.Override public CreateEntityReconciliationJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateEntityReconciliationJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateEntityReconciliationJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.enterpriseknowledgegraph.v1.CreateEntityReconciliationJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
35,537
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.LinkedHashMap; import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RpcNoSuchMethodException; import org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager; import org.apache.hadoop.mapreduce.filecache.DistributedCache; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.client.api.SharedCacheClient; import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.VisibleForTesting; /** * This class is responsible for uploading resources from the client to HDFS * that are associated with a MapReduce job. */ @Private @Unstable class JobResourceUploader { protected static final Logger LOG = LoggerFactory.getLogger(JobResourceUploader.class); private static final String ROOT_PATH = "/"; private final boolean useWildcard; private final FileSystem jtFs; private SharedCacheClient scClient = null; private SharedCacheConfig scConfig = new SharedCacheConfig(); private ApplicationId appId = null; JobResourceUploader(FileSystem submitFs, boolean useWildcard) { this.jtFs = submitFs; this.useWildcard = useWildcard; } private void initSharedCache(JobID jobid, Configuration conf) { this.scConfig.init(conf); if (this.scConfig.isSharedCacheEnabled()) { this.scClient = createSharedCacheClient(conf); appId = jobIDToAppId(jobid); } } /* * We added this method so that we could do the conversion between JobId and * ApplicationId for the shared cache client. This logic is very similar to * the org.apache.hadoop.mapreduce.TypeConverter#toYarn method. We don't use * that because mapreduce-client-core can not depend on * mapreduce-client-common. */ private ApplicationId jobIDToAppId(JobID jobId) { return ApplicationId.newInstance(Long.parseLong(jobId.getJtIdentifier()), jobId.getId()); } private void stopSharedCache() { if (scClient != null) { scClient.stop(); scClient = null; } } /** * Create, initialize and start a new shared cache client. */ @VisibleForTesting protected SharedCacheClient createSharedCacheClient(Configuration conf) { SharedCacheClient scc = SharedCacheClient.createSharedCacheClient(); scc.init(conf); scc.start(); return scc; } /** * Upload and configure files, libjars, jobjars, and archives pertaining to * the passed job. 
* <p> * This client will use the shared cache for libjars, files, archives and * jobjars if it is enabled. When shared cache is enabled, it will try to use * the shared cache and fall back to the default behavior when the scm isn't * available. * <p> * 1. For the resources that have been successfully shared, we will continue * to use them in a shared fashion. * <p> * 2. For the resources that weren't in the cache and need to be uploaded by * NM, we won't ask NM to upload them. * * @param job the job containing the files to be uploaded * @param submitJobDir the submission directory of the job * @throws IOException */ public void uploadResources(Job job, Path submitJobDir) throws IOException { try { initSharedCache(job.getJobID(), job.getConfiguration()); uploadResourcesInternal(job, submitJobDir); } finally { stopSharedCache(); } } private void uploadResourcesInternal(Job job, Path submitJobDir) throws IOException { Configuration conf = job.getConfiguration(); short replication = (short) conf.getInt(Job.SUBMIT_REPLICATION, Job.DEFAULT_SUBMIT_REPLICATION); if (!(conf.getBoolean(Job.USED_GENERIC_PARSER, false))) { LOG.warn("Hadoop command-line option parsing not performed. " + "Implement the Tool interface and execute your application " + "with ToolRunner to remedy this."); } // // Figure out what fs the JobTracker is using. Copy the // job to it, under a temporary name. This allows DFS to work, // and under the local fs also provides UNIX-like object loading // semantics. (that is, if the job file is deleted right after // submission, we can still run the submission to completion) // // Create a number of filenames in the JobTracker's fs namespace LOG.debug("default FileSystem: " + jtFs.getUri()); if (jtFs.exists(submitJobDir)) { throw new IOException("Not submitting job. Job directory " + submitJobDir + " already exists!! This is unexpected.Please check what's there in" + " that directory"); } // Create the submission directory for the MapReduce job. 
submitJobDir = jtFs.makeQualified(submitJobDir); submitJobDir = new Path(submitJobDir.toUri().getPath()); FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION); mkdirs(jtFs, submitJobDir, mapredSysPerms); if (!conf.getBoolean(MRJobConfig.MR_AM_STAGING_DIR_ERASURECODING_ENABLED, MRJobConfig.DEFAULT_MR_AM_STAGING_ERASURECODING_ENABLED)) { disableErasureCodingForPath(submitJobDir); } // Get the resources that have been added via command line arguments in the // GenericOptionsParser (i.e. files, libjars, archives). Collection<String> files = conf.getStringCollection("tmpfiles"); Collection<String> libjars = conf.getStringCollection("tmpjars"); Collection<String> archives = conf.getStringCollection("tmparchives"); String jobJar = job.getJar(); // Merge resources that have been programmatically specified for the shared // cache via the Job API. files.addAll(conf.getStringCollection(MRJobConfig.FILES_FOR_SHARED_CACHE)); libjars.addAll(conf.getStringCollection( MRJobConfig.FILES_FOR_CLASSPATH_AND_SHARED_CACHE)); archives.addAll(conf .getStringCollection(MRJobConfig.ARCHIVES_FOR_SHARED_CACHE)); Map<URI, FileStatus> statCache = new HashMap<URI, FileStatus>(); checkLocalizationLimits(conf, files, libjars, archives, jobJar, statCache); Map<String, Boolean> fileSCUploadPolicies = new LinkedHashMap<String, Boolean>(); Map<String, Boolean> archiveSCUploadPolicies = new LinkedHashMap<String, Boolean>(); uploadFiles(job, files, submitJobDir, mapredSysPerms, replication, fileSCUploadPolicies, statCache); uploadLibJars(job, libjars, submitJobDir, mapredSysPerms, replication, fileSCUploadPolicies, statCache); uploadArchives(job, archives, submitJobDir, mapredSysPerms, replication, archiveSCUploadPolicies, statCache); uploadJobJar(job, jobJar, submitJobDir, replication, statCache); addLog4jToDistributedCache(job, submitJobDir); // Note, we do not consider resources in the distributed cache for the // shared cache at this time. 
Only resources specified via the // GenericOptionsParser or the jobjar. Job.setFileSharedCacheUploadPolicies(conf, fileSCUploadPolicies); Job.setArchiveSharedCacheUploadPolicies(conf, archiveSCUploadPolicies); // set the timestamps of the archives and files // set the public/private visibility of the archives and files ClientDistributedCacheManager.determineTimestampsAndCacheVisibilities(conf, statCache); // get DelegationToken for cached file ClientDistributedCacheManager.getDelegationTokens(conf, job.getCredentials()); } @VisibleForTesting void uploadFiles(Job job, Collection<String> files, Path submitJobDir, FsPermission mapredSysPerms, short submitReplication, Map<String, Boolean> fileSCUploadPolicies, Map<URI, FileStatus> statCache) throws IOException { Configuration conf = job.getConfiguration(); Path filesDir = JobSubmissionFiles.getJobDistCacheFiles(submitJobDir); if (!files.isEmpty()) { mkdirs(jtFs, filesDir, mapredSysPerms); for (String tmpFile : files) { URI tmpURI = null; try { tmpURI = new URI(tmpFile); } catch (URISyntaxException e) { throw new IllegalArgumentException("Error parsing files argument." + " Argument must be a valid URI: " + tmpFile, e); } Path tmp = new Path(tmpURI); URI newURI = null; boolean uploadToSharedCache = false; if (scConfig.isSharedCacheFilesEnabled()) { newURI = useSharedCache(tmpURI, tmp.getName(), statCache, conf, true); if (newURI == null) { uploadToSharedCache = true; } } if (newURI == null) { Path newPath = copyRemoteFiles(filesDir, tmp, conf, submitReplication); try { newURI = getPathURI(newPath, tmpURI.getFragment()); } catch (URISyntaxException ue) { // should not throw a uri exception throw new IOException( "Failed to create a URI (URISyntaxException) for the" + " remote path " + newPath + ". 
This was based on the files parameter: " + tmpFile, ue); } } job.addCacheFile(newURI); if (scConfig.isSharedCacheFilesEnabled()) { fileSCUploadPolicies.put(newURI.toString(), uploadToSharedCache); } } } } // Suppress warning for use of DistributedCache (it is everywhere). @SuppressWarnings("deprecation") @VisibleForTesting void uploadLibJars(Job job, Collection<String> libjars, Path submitJobDir, FsPermission mapredSysPerms, short submitReplication, Map<String, Boolean> fileSCUploadPolicies, Map<URI, FileStatus> statCache) throws IOException { Configuration conf = job.getConfiguration(); Path libjarsDir = JobSubmissionFiles.getJobDistCacheLibjars(submitJobDir); if (!libjars.isEmpty()) { mkdirs(jtFs, libjarsDir, mapredSysPerms); Collection<URI> libjarURIs = new LinkedList<>(); boolean foundFragment = false; for (String tmpjars : libjars) { URI tmpURI = null; try { tmpURI = new URI(tmpjars); } catch (URISyntaxException e) { throw new IllegalArgumentException("Error parsing libjars argument." + " Argument must be a valid URI: " + tmpjars, e); } Path tmp = new Path(tmpURI); URI newURI = null; boolean uploadToSharedCache = false; boolean fromSharedCache = false; if (scConfig.isSharedCacheLibjarsEnabled()) { newURI = useSharedCache(tmpURI, tmp.getName(), statCache, conf, true); if (newURI == null) { uploadToSharedCache = true; } else { fromSharedCache = true; } } if (newURI == null) { Path newPath = copyRemoteFiles(libjarsDir, tmp, conf, submitReplication); try { newURI = getPathURI(newPath, tmpURI.getFragment()); } catch (URISyntaxException ue) { // should not throw a uri exception throw new IOException( "Failed to create a URI (URISyntaxException) for the" + " remote path " + newPath + ". This was based on the libjar parameter: " + tmpjars, ue); } } if (!foundFragment) { // We do not count shared cache paths containing fragments as a // "foundFragment." 
This is because these resources are not in the // staging directory and will be added to the distributed cache // separately. foundFragment = (newURI.getFragment() != null) && !fromSharedCache; } Job.addFileToClassPath(new Path(newURI.getPath()), conf, jtFs, false); if (fromSharedCache) { // We simply add this URI to the distributed cache. It will not come // from the staging directory (it is in the shared cache), so we // must add it to the cache regardless of the wildcard feature. Job.addCacheFile(newURI, conf); } else { libjarURIs.add(newURI); } if (scConfig.isSharedCacheLibjarsEnabled()) { fileSCUploadPolicies.put(newURI.toString(), uploadToSharedCache); } } if (useWildcard && !foundFragment) { // Add the whole directory to the cache using a wild card Path libJarsDirWildcard = jtFs.makeQualified(new Path(libjarsDir, DistributedCache.WILDCARD)); Job.addCacheFile(libJarsDirWildcard.toUri(), conf); } else { for (URI uri : libjarURIs) { Job.addCacheFile(uri, conf); } } } } @VisibleForTesting void uploadArchives(Job job, Collection<String> archives, Path submitJobDir, FsPermission mapredSysPerms, short submitReplication, Map<String, Boolean> archiveSCUploadPolicies, Map<URI, FileStatus> statCache) throws IOException { Configuration conf = job.getConfiguration(); Path archivesDir = JobSubmissionFiles.getJobDistCacheArchives(submitJobDir); if (!archives.isEmpty()) { mkdirs(jtFs, archivesDir, mapredSysPerms); for (String tmpArchives : archives) { URI tmpURI; try { tmpURI = new URI(tmpArchives); } catch (URISyntaxException e) { throw new IllegalArgumentException("Error parsing archives argument." 
+ " Argument must be a valid URI: " + tmpArchives, e); } Path tmp = new Path(tmpURI); URI newURI = null; boolean uploadToSharedCache = false; if (scConfig.isSharedCacheArchivesEnabled()) { newURI = useSharedCache(tmpURI, tmp.getName(), statCache, conf, true); if (newURI == null) { uploadToSharedCache = true; } } if (newURI == null) { Path newPath = copyRemoteFiles(archivesDir, tmp, conf, submitReplication); try { newURI = getPathURI(newPath, tmpURI.getFragment()); } catch (URISyntaxException ue) { // should not throw a uri exception throw new IOException( "Failed to create a URI (URISyntaxException) for the" + " remote path " + newPath + ". This was based on the archive parameter: " + tmpArchives, ue); } } job.addCacheArchive(newURI); if (scConfig.isSharedCacheArchivesEnabled()) { archiveSCUploadPolicies.put(newURI.toString(), uploadToSharedCache); } } } } @VisibleForTesting void uploadJobJar(Job job, String jobJar, Path submitJobDir, short submitReplication, Map<URI, FileStatus> statCache) throws IOException { Configuration conf = job.getConfiguration(); if (jobJar != null) { // copy jar to JobTracker's fs // use jar name if job is not named. if ("".equals(job.getJobName())) { job.setJobName(new Path(jobJar).getName()); } Path jobJarPath = new Path(jobJar); URI jobJarURI = jobJarPath.toUri(); Path newJarPath = null; boolean uploadToSharedCache = false; if (jobJarURI.getScheme() == null || jobJarURI.getScheme().equals("file")) { // job jar is on the local file system if (scConfig.isSharedCacheJobjarEnabled()) { // We must have a qualified path for the shared cache client. We can // assume this is for the local filesystem jobJarPath = FileSystem.getLocal(conf).makeQualified(jobJarPath); // Don't add a resource name here because the resource name (i.e. 
// job.jar directory symlink) will always be hard coded to job.jar for // the job.jar URI newURI = useSharedCache(jobJarPath.toUri(), null, statCache, conf, false); if (newURI == null) { uploadToSharedCache = true; } else { newJarPath = stringToPath(newURI.toString()); // The job jar is coming from the shared cache (i.e. a public // place), so we want the job.jar to have a public visibility. conf.setBoolean(MRJobConfig.JOBJAR_VISIBILITY, true); } } if (newJarPath == null) { newJarPath = JobSubmissionFiles.getJobJar(submitJobDir); copyJar(jobJarPath, newJarPath, submitReplication); } } else { // job jar is in a remote file system if (scConfig.isSharedCacheJobjarEnabled()) { // Don't add a resource name here because the resource name (i.e. // job.jar directory symlink) will always be hard coded to job.jar for // the job.jar URI newURI = useSharedCache(jobJarURI, null, statCache, conf, false); if (newURI == null) { uploadToSharedCache = true; newJarPath = jobJarPath; } else { newJarPath = stringToPath(newURI.toString()); // The job jar is coming from the shared cache (i.e. a public // place), so we want the job.jar to have a public visibility. conf.setBoolean(MRJobConfig.JOBJAR_VISIBILITY, true); } } else { // we don't need to upload the jobjar to the staging directory because // it is already in an accessible place newJarPath = jobJarPath; } } job.setJar(newJarPath.toString()); if (scConfig.isSharedCacheJobjarEnabled()) { conf.setBoolean(MRJobConfig.JOBJAR_SHARED_CACHE_UPLOAD_POLICY, uploadToSharedCache); } } else { LOG.warn("No job jar file set. User classes may not be found. " + "See Job or Job#setJar(String)."); } } /** * Verify that the resources this job is going to localize are within the * localization limits. We count all resources towards these limits regardless * of where they are coming from (i.e. local, distributed cache, or shared * cache). 
*/ @VisibleForTesting void checkLocalizationLimits(Configuration conf, Collection<String> files, Collection<String> libjars, Collection<String> archives, String jobJar, Map<URI, FileStatus> statCache) throws IOException { LimitChecker limitChecker = new LimitChecker(conf); if (!limitChecker.hasLimits()) { // there are no limits set, so we are done. return; } // Get the files and archives that are already in the distributed cache Collection<String> dcFiles = conf.getStringCollection(MRJobConfig.CACHE_FILES); Collection<String> dcArchives = conf.getStringCollection(MRJobConfig.CACHE_ARCHIVES); for (String uri : dcFiles) { explorePath(conf, stringToPath(uri), limitChecker, statCache); } for (String uri : dcArchives) { explorePath(conf, stringToPath(uri), limitChecker, statCache); } for (String uri : files) { explorePath(conf, stringToPath(uri), limitChecker, statCache); } for (String uri : libjars) { explorePath(conf, stringToPath(uri), limitChecker, statCache); } for (String uri : archives) { explorePath(conf, stringToPath(uri), limitChecker, statCache); } if (jobJar != null) { explorePath(conf, stringToPath(jobJar), limitChecker, statCache); } } /** * Convert a String to a Path and gracefully remove fragments/queries if they * exist in the String. */ @VisibleForTesting Path stringToPath(String s) { try { URI uri = new URI(s); return new Path(uri.getScheme(), uri.getAuthority(), uri.getPath()); } catch (URISyntaxException e) { throw new IllegalArgumentException( "Error parsing argument." 
+ " Argument must be a valid URI: " + s, e); } } @VisibleForTesting protected static final String MAX_RESOURCE_ERR_MSG = "This job has exceeded the maximum number of submitted resources"; @VisibleForTesting protected static final String MAX_TOTAL_RESOURCE_MB_ERR_MSG = "This job has exceeded the maximum size of submitted resources"; @VisibleForTesting protected static final String MAX_SINGLE_RESOURCE_MB_ERR_MSG = "This job has exceeded the maximum size of a single submitted resource"; private static class LimitChecker { LimitChecker(Configuration conf) { this.maxNumOfResources = conf.getInt(MRJobConfig.MAX_RESOURCES, MRJobConfig.MAX_RESOURCES_DEFAULT); this.maxSizeMB = conf.getLong(MRJobConfig.MAX_RESOURCES_MB, MRJobConfig.MAX_RESOURCES_MB_DEFAULT); this.maxSizeOfResourceMB = conf.getLong(MRJobConfig.MAX_SINGLE_RESOURCE_MB, MRJobConfig.MAX_SINGLE_RESOURCE_MB_DEFAULT); this.totalConfigSizeBytes = maxSizeMB * 1024 * 1024; this.totalConfigSizeOfResourceBytes = maxSizeOfResourceMB * 1024 * 1024; } private long totalSizeBytes = 0; private int totalNumberOfResources = 0; private long currentMaxSizeOfFileBytes = 0; private final long maxSizeMB; private final int maxNumOfResources; private final long maxSizeOfResourceMB; private final long totalConfigSizeBytes; private final long totalConfigSizeOfResourceBytes; private boolean hasLimits() { return maxNumOfResources > 0 || maxSizeMB > 0 || maxSizeOfResourceMB > 0; } private void addFile(Path p, long fileSizeBytes) throws IOException { totalNumberOfResources++; totalSizeBytes += fileSizeBytes; if (fileSizeBytes > currentMaxSizeOfFileBytes) { currentMaxSizeOfFileBytes = fileSizeBytes; } if (totalConfigSizeBytes > 0 && totalSizeBytes > totalConfigSizeBytes) { throw new IOException(MAX_TOTAL_RESOURCE_MB_ERR_MSG + " (Max: " + maxSizeMB + "MB)."); } if (maxNumOfResources > 0 && totalNumberOfResources > maxNumOfResources) { throw new IOException(MAX_RESOURCE_ERR_MSG + " (Max: " + maxNumOfResources + ")."); } if 
(totalConfigSizeOfResourceBytes > 0 && currentMaxSizeOfFileBytes > totalConfigSizeOfResourceBytes) { throw new IOException(MAX_SINGLE_RESOURCE_MB_ERR_MSG + " (Max: " + maxSizeOfResourceMB + "MB, Violating resource: " + p + ")."); } } } /** * Recursively explore the given path and enforce the limits for resource * localization. This method assumes that there are no symlinks in the * directory structure. */ private void explorePath(Configuration job, Path p, LimitChecker limitChecker, Map<URI, FileStatus> statCache) throws IOException { Path pathWithScheme = p; if (!pathWithScheme.toUri().isAbsolute()) { // the path does not have a scheme, so we assume it is a path from the // local filesystem FileSystem localFs = FileSystem.getLocal(job); pathWithScheme = localFs.makeQualified(p); } FileStatus status = getFileStatus(statCache, job, pathWithScheme); if (status.isDirectory()) { FileStatus[] statusArray = pathWithScheme.getFileSystem(job).listStatus(pathWithScheme); for (FileStatus s : statusArray) { explorePath(job, s.getPath(), limitChecker, statCache); } } else { limitChecker.addFile(pathWithScheme, status.getLen()); } } @VisibleForTesting FileStatus getFileStatus(Map<URI, FileStatus> statCache, Configuration job, Path p) throws IOException { URI u = p.toUri(); FileStatus status = statCache.get(u); if (status == null) { status = p.getFileSystem(job).getFileStatus(p); statCache.put(u, status); } return status; } /** * Create a new directory in the passed filesystem. This wrapper method exists * so that it can be overridden/stubbed during testing. 
*/ @VisibleForTesting boolean mkdirs(FileSystem fs, Path dir, FsPermission permission) throws IOException { return FileSystem.mkdirs(fs, dir, permission); } // copies a file to the jobtracker filesystem and returns the path where it // was copied to @VisibleForTesting Path copyRemoteFiles(Path parentDir, Path originalPath, Configuration conf, short replication) throws IOException { // check if we do not need to copy the files // is jt using the same file system. // just checking for uri strings... doing no dns lookups // to see if the filesystems are the same. This is not optimal. // but avoids name resolution. FileSystem remoteFs = null; remoteFs = originalPath.getFileSystem(conf); if (FileUtil.compareFs(remoteFs, jtFs)) { return originalPath; } boolean root = false; if (ROOT_PATH.equals(originalPath.toUri().getPath())) { // "/" needs special treatment root = true; } else { // If originalPath ends in a "/", then remove it so // that originalPath.getName() does not return an empty string String uriString = originalPath.toUri().toString(); if (uriString.endsWith("/")) { try { URI strippedURI = new URI(uriString.substring(0, uriString.length() - 1)); originalPath = new Path(strippedURI); } catch (URISyntaxException e) { throw new IllegalArgumentException("Error processing URI", e); } } } // this might have name collisions. copy will throw an exception // parse the original path to create new path Path newPath = root ? parentDir : new Path(parentDir, originalPath.getName()); FileUtil.copy(remoteFs, originalPath, jtFs, newPath, false, conf); jtFs.setReplication(newPath, replication); jtFs.makeQualified(newPath); return newPath; } /** * Checksum a local resource file and call use for that resource with the scm. 
*/ private URI useSharedCache(URI sourceFile, String resourceName, Map<URI, FileStatus> statCache, Configuration conf, boolean honorFragment) throws IOException { if (scClient == null) { return null; } Path filePath = new Path(sourceFile); if (getFileStatus(statCache, conf, filePath).isDirectory()) { LOG.warn("Shared cache does not support directories" + " (see YARN-6097)." + " Will not upload " + filePath + " to the shared cache."); return null; } String rn = resourceName; if (honorFragment) { if (sourceFile.getFragment() != null) { rn = sourceFile.getFragment(); } } // If for whatever reason, we can't even calculate checksum for // a resource, something is really wrong with the file system; // even non-SCM approach won't work. Let us just throw the exception. String checksum = scClient.getFileChecksum(filePath); URL url = null; try { url = scClient.use(this.appId, checksum); } catch (YarnException e) { LOG.warn("Error trying to contact the shared cache manager," + " disabling the SCMClient for the rest of this job submission", e); /* * If we fail to contact the SCM, we do not use it for the rest of this * JobResourceUploader's life. This prevents us from having to timeout * each time we try to upload a file while the SCM is unavailable. Instead * we timeout/error the first time and quickly revert to the default * behavior without the shared cache. We do this by stopping the shared * cache client and setting it to null. */ stopSharedCache(); } if (url != null) { // Because we deal with URI's in mapreduce, we need to convert the URL to // a URI and add a fragment if necessary. URI uri = null; try { String name = new Path(url.getFile()).getName(); if (rn != null && !name.equals(rn)) { // A name was specified that is different then the URL in the shared // cache. Therefore, we need to set the fragment portion of the URI to // preserve the user's desired name. 
We assume that there is no // existing fragment in the URL since the shared cache manager does // not use fragments. uri = new URI(url.getScheme(), url.getUserInfo(), url.getHost(), url.getPort(), url.getFile(), null, rn); } else { uri = new URI(url.getScheme(), url.getUserInfo(), url.getHost(), url.getPort(), url.getFile(), null, null); } return uri; } catch (URISyntaxException e) { LOG.warn("Error trying to convert URL received from shared cache to" + " a URI: " + url.toString()); return null; } } else { return null; } } @VisibleForTesting void copyJar(Path originalJarPath, Path submitJarFile, short replication) throws IOException { jtFs.copyFromLocalFile(originalJarPath, submitJarFile); // The operation of setReplication requires certain permissions // so we need to make sure it has enough permissions jtFs.setPermission(submitJarFile, new FsPermission( JobSubmissionFiles.JOB_FILE_PERMISSION)); jtFs.setReplication(submitJarFile, replication); } private void addLog4jToDistributedCache(Job job, Path jobSubmitDir) throws IOException { Configuration conf = job.getConfiguration(); String log4jPropertyFile = conf.get(MRJobConfig.MAPREDUCE_JOB_LOG4J_PROPERTIES_FILE, ""); if (!log4jPropertyFile.isEmpty()) { short replication = (short) conf.getInt(Job.SUBMIT_REPLICATION, 10); copyLog4jPropertyFile(job, jobSubmitDir, replication); } } private URI getPathURI(Path destPath, String fragment) throws URISyntaxException { URI pathURI = destPath.toUri(); if (pathURI.getFragment() == null) { if (fragment == null) { // no fragment, just return existing pathURI from destPath } else { pathURI = new URI(pathURI.toString() + "#" + fragment); } } return pathURI; } // copy user specified log4j.property file in local // to HDFS with putting on distributed cache and adding its parent directory // to classpath. 
@SuppressWarnings("deprecation") private void copyLog4jPropertyFile(Job job, Path submitJobDir, short replication) throws IOException { Configuration conf = job.getConfiguration(); String file = validateFilePath( conf.get(MRJobConfig.MAPREDUCE_JOB_LOG4J_PROPERTIES_FILE), conf); LOG.debug("default FileSystem: " + jtFs.getUri()); FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION); try { jtFs.getFileStatus(submitJobDir); } catch (FileNotFoundException e) { throw new IOException("Cannot find job submission directory! " + "It should just be created, so something wrong here.", e); } Path fileDir = JobSubmissionFiles.getJobLog4jFile(submitJobDir); // first copy local log4j.properties file to HDFS under submitJobDir if (file != null) { FileSystem.mkdirs(jtFs, fileDir, mapredSysPerms); URI tmpURI = null; try { tmpURI = new URI(file); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } Path tmp = new Path(tmpURI); Path newPath = copyRemoteFiles(fileDir, tmp, conf, replication); Path path = new Path(newPath.toUri().getPath()); Job.addFileToClassPath(path, conf, path.getFileSystem(conf)); } } /** * takes input as a path string for file and verifies if it exist. It defaults * for file:/// if the files specified do not have a scheme. it returns the * paths uri converted defaulting to file:///. 
So an input of /home/user/file1 * would return file:///home/user/file1 * * @param file * @param conf * @return */ private String validateFilePath(String file, Configuration conf) throws IOException { if (file == null) { return null; } if (file.isEmpty()) { throw new IllegalArgumentException("File name can't be empty string"); } String finalPath; URI pathURI; try { pathURI = new URI(file); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } Path path = new Path(pathURI); if (pathURI.getScheme() == null) { FileSystem localFs = FileSystem.getLocal(conf); // default to the local file system // check if the file exists or not first localFs.getFileStatus(path); finalPath = path.makeQualified(localFs.getUri(), localFs.getWorkingDirectory()) .toString(); } else { // check if the file exists in this file system // we need to recreate this filesystem object to copy // these files to the file system ResourceManager is running // on. FileSystem fs = path.getFileSystem(conf); fs.getFileStatus(path); finalPath = path.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(); } return finalPath; } private void disableErasureCodingForPath(Path path) throws IOException { try { if (jtFs instanceof DistributedFileSystem) { LOG.info("Disabling Erasure Coding for path: " + path); DistributedFileSystem dfs = (DistributedFileSystem) jtFs; dfs.setErasureCodingPolicy(path, SystemErasureCodingPolicies.getReplicationPolicy().getName()); } } catch (RemoteException e) { if (!RpcNoSuchMethodException.class.getName().equals(e.getClassName())) { throw e; } else { if (LOG.isDebugEnabled()) { LOG.debug( "Ignore disabling erasure coding for path {} because method " + "disableErasureCodingForPath doesn't exist, probably " + "talking to a lower version HDFS.", path.toString(), e); } } } } }
apache/hadoop
35,569
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyWithNodeGroup.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.blockmanagement; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_USE_DFS_NETWORK_TOPOLOGY_KEY; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.TestBlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NetworkTopologyWithNodeGroup; import org.apache.hadoop.net.Node; import org.junit.jupiter.api.Test; public class TestReplicationPolicyWithNodeGroup extends BaseReplicationPolicyTest { 
public TestReplicationPolicyWithNodeGroup() { this.blockPlacementPolicy = BlockPlacementPolicyWithNodeGroup.class.getName(); } @Override DatanodeDescriptor[] getDatanodeDescriptors(Configuration conf) { // default is true, in this case this test will against DFSNetworkTopology // but it run on NetworkTopologyWithNodeGroup, so set to false. conf.setBoolean(DFS_USE_DFS_NETWORK_TOPOLOGY_KEY, false); conf.set(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY, NetworkTopologyWithNodeGroup.class.getName()); final String[] racks = { "/d1/r1/n1", "/d1/r1/n1", "/d1/r1/n2", "/d1/r2/n3", "/d1/r2/n3", "/d1/r2/n4", "/d2/r3/n5", "/d2/r3/n6" }; storages = DFSTestUtil.createDatanodeStorageInfos(racks); return DFSTestUtil.toDatanodeDescriptor(storages); } private static final DatanodeStorageInfo[] storagesInBoundaryCase; private static final DatanodeDescriptor[] dataNodesInBoundaryCase; static { final String[] racksInBoundaryCase = { "/d1/r1/n1", "/d1/r1/n1", "/d1/r1/n1", "/d1/r1/n2", "/d1/r2/n3", "/d1/r2/n3" }; storagesInBoundaryCase = DFSTestUtil.createDatanodeStorageInfos(racksInBoundaryCase); dataNodesInBoundaryCase = DFSTestUtil.toDatanodeDescriptor(storagesInBoundaryCase); } private static final DatanodeStorageInfo[] storagesInMoreTargetsCase; private final static DatanodeDescriptor[] dataNodesInMoreTargetsCase; static { final String[] racksInMoreTargetsCase = { "/r1/n1", "/r1/n1", "/r1/n2", "/r1/n2", "/r1/n3", "/r1/n3", "/r2/n4", "/r2/n4", "/r2/n5", "/r2/n5", "/r2/n6", "/r2/n6" }; storagesInMoreTargetsCase = DFSTestUtil.createDatanodeStorageInfos(racksInMoreTargetsCase); dataNodesInMoreTargetsCase = DFSTestUtil.toDatanodeDescriptor(storagesInMoreTargetsCase); }; private final static DatanodeDescriptor NODE = DFSTestUtil.getDatanodeDescriptor("9.9.9.9", "/d2/r4/n7"); private static final DatanodeStorageInfo[] storagesForDependencies; private static final DatanodeDescriptor[] dataNodesForDependencies; static { final String[] racksForDependencies = { "/d1/r1/n1", "/d1/r1/n1", 
"/d1/r1/n2", "/d1/r1/n2", "/d1/r1/n3", "/d1/r1/n4" }; final String[] hostNamesForDependencies = { "h1", "h2", "h3", "h4", "h5", "h6" }; storagesForDependencies = DFSTestUtil.createDatanodeStorageInfos( racksForDependencies, hostNamesForDependencies); dataNodesForDependencies = DFSTestUtil.toDatanodeDescriptor(storagesForDependencies); }; /** * Test block placement verification. * @throws Exception */ @Test public void testVerifyBlockPlacement() throws Exception { LocatedBlock locatedBlock; BlockPlacementStatus status; ExtendedBlock b = new ExtendedBlock("fake-pool", new Block(12345L)); List<DatanodeStorageInfo> set = new ArrayList<>(); // 2 node groups (not enough), 2 racks (enough) set.clear(); set.add(storages[0]); set.add(storages[1]); set.add(storages[4]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertFalse(status.isPlacementPolicySatisfied()); // 3 node groups (enough), 2 racks (enough) set.clear(); set.add(storages[0]); set.add(storages[2]); set.add(storages[5]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertTrue(status.isPlacementPolicySatisfied()); // 2 node groups (not enough), 1 rack (not enough) set.clear(); set.add(storages[0]); set.add(storages[1]); set.add(storages[2]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertFalse(status.isPlacementPolicySatisfied()); assertTrue(status.getErrorDescription().contains("node group")); assertTrue(status.getErrorDescription().contains("more rack(s)")); // 3 node groups (enough), 3 racks (enough) set.clear(); set.add(storages[0]); set.add(storages[5]); 
set.add(storages[7]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertTrue(status.isPlacementPolicySatisfied()); // 3 node groups (not enough), 3 racks (enough), 4 replicas set.clear(); set.add(storages[0]); set.add(storages[1]); set.add(storages[5]); set.add(storages[7]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertFalse(status.isPlacementPolicySatisfied()); assertTrue(status.getErrorDescription().contains("node group")); assertFalse(status.getErrorDescription().contains("more rack(s)")); // 2 node groups (not enough), 1 rack (not enough) set.clear(); set.add(storages[0]); set.add(storages[1]); set.add(storages[2]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertFalse(status.isPlacementPolicySatisfied()); assertTrue(status.getErrorDescription().contains("node group")); assertTrue(status.getErrorDescription().contains("more rack(s)")); // 1 node group (not enough), 1 rack (not enough) set.clear(); set.add(storages[0]); set.add(storages[1]); locatedBlock = BlockManager.newLocatedBlock(b, set.toArray(new DatanodeStorageInfo[set.size()]), 0, false); status = replicator.verifyBlockPlacement(locatedBlock.getLocations(), set.size()); assertFalse(status.isPlacementPolicySatisfied()); assertTrue(status.getErrorDescription().contains("node group")); assertTrue(status.getErrorDescription().contains("more rack(s)")); } /** * Scan the targets list: all targets should be on different NodeGroups. * Return false if two targets are found on the same NodeGroup. 
*/ private static boolean checkTargetsOnDifferentNodeGroup( DatanodeStorageInfo[] targets) { if(targets.length == 0) return true; Set<String> targetSet = new HashSet<>(); for(DatanodeStorageInfo storage:targets) { final DatanodeDescriptor node = storage.getDatanodeDescriptor(); String nodeGroup = NetworkTopology.getLastHalf(node.getNetworkLocation()); if(targetSet.contains(nodeGroup)) { return false; } else { targetSet.add(nodeGroup); } } return true; } private boolean isOnSameRack(DatanodeDescriptor left, DatanodeStorageInfo right) { return cluster.isOnSameRack(left, right.getDatanodeDescriptor()); } private boolean isOnSameNodeGroup(DatanodeStorageInfo left, DatanodeStorageInfo right) { return isOnSameNodeGroup(left.getDatanodeDescriptor(), right); } private boolean isOnSameNodeGroup(DatanodeDescriptor left, DatanodeStorageInfo right) { return cluster.isOnSameNodeGroup(left, right.getDatanodeDescriptor()); } private DatanodeStorageInfo[] chooseTarget( int numOfReplicas, DatanodeDescriptor writer, Set<Node> excludedNodes, List<DatanodeDescriptor> favoredNodes) { return replicator.chooseTarget(filename, numOfReplicas, writer, excludedNodes, BLOCK_SIZE, favoredNodes, TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY, null); } /** * In this testcase, client is dataNodes[0]. So the 1st replica should be * placed on dataNodes[0], the 2nd replica should be placed on * different rack and third should be placed on different node (and node group) * of rack chosen for 2nd node. * The only excpetion is when the <i>numOfReplicas</i> is 2, * the 1st is on dataNodes[0] and the 2nd is on a different rack. 
* @throws Exception */ @Test public void testChooseTarget1() throws Exception { updateHeartbeatWithUsage(dataNodes[0], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 4, 0); // overloaded DatanodeStorageInfo[] targets; targets = chooseTarget(0); assertEquals(targets.length, 0); targets = chooseTarget(1); assertEquals(targets.length, 1); assertEquals(storages[0], targets[0]); targets = chooseTarget(2); assertEquals(targets.length, 2); assertEquals(storages[0], targets[0]); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3); assertEquals(targets.length, 3); assertEquals(storages[0], targets[0]); assertFalse(isOnSameRack(targets[0], targets[1])); assertTrue(isOnSameRack(targets[1], targets[2])); assertFalse(isOnSameNodeGroup(targets[1], targets[2])); targets = chooseTarget(4); assertEquals(targets.length, 4); assertEquals(storages[0], targets[0]); assertTrue(isOnSameRack(targets[1], targets[2]) || isOnSameRack(targets[2], targets[3])); assertFalse(isOnSameRack(targets[0], targets[2])); // Make sure no more than one replicas are on the same nodegroup verifyNoTwoTargetsOnSameNodeGroup(targets); updateHeartbeatWithUsage(dataNodes[0], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } private void verifyNoTwoTargetsOnSameNodeGroup(DatanodeStorageInfo[] targets) { Set<String> nodeGroupSet = new HashSet<>(); for (DatanodeStorageInfo target: targets) { nodeGroupSet.add(target.getDatanodeDescriptor().getNetworkLocation()); } assertEquals(nodeGroupSet.size(), targets.length); } /** * In this testcase, client is dataNodes[0], but the dataNodes[1] is * not allowed to be chosen. 
So the 1st replica should be * placed on dataNodes[0], the 2nd replica should be placed on a different * rack, the 3rd should be on same rack as the 2nd replica but in different * node group, and the rest should be placed on a third rack. * @throws Exception */ @Test public void testChooseTarget2() throws Exception { DatanodeStorageInfo[] targets; BlockPlacementPolicyDefault repl = (BlockPlacementPolicyDefault)replicator; List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); Set<Node> excludedNodes = new HashSet<>(); excludedNodes.add(dataNodes[1]); targets = repl.chooseTarget(filename, 4, dataNodes[0], chosenNodes, false, excludedNodes, BLOCK_SIZE, TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY, null); assertEquals(targets.length, 4); assertEquals(storages[0], targets[0]); assertTrue(cluster.isNodeGroupAware()); // Make sure no replicas are on the same nodegroup for (int i=1;i<4;i++) { assertFalse(isOnSameNodeGroup(targets[0], targets[i])); } assertTrue(isOnSameRack(targets[1], targets[2]) || isOnSameRack(targets[2], targets[3])); assertFalse(isOnSameRack(targets[1], targets[3])); excludedNodes.clear(); chosenNodes.clear(); excludedNodes.add(dataNodes[1]); chosenNodes.add(storages[2]); targets = repl.chooseTarget(filename, 1, dataNodes[0], chosenNodes, true, excludedNodes, BLOCK_SIZE, TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY, null); System.out.println("targets=" + Arrays.asList(targets)); assertEquals(2, targets.length); //make sure that the chosen node is in the target. int i = 0; for(; i < targets.length && !storages[2].equals(targets[i]); i++); assertTrue(i < targets.length); } /** * In this testcase, client is dataNodes[0], but dataNodes[0] is not qualified * to be chosen. So the 1st replica should be placed on dataNodes[1], * the 2nd replica should be placed on a different rack, * the 3rd replica should be placed on the same rack as the 2nd replica but in different nodegroup, * and the rest should be placed on the third rack. 
* @throws Exception */ @Test public void testChooseTarget3() throws Exception { // make data node 0 to be not qualified to choose updateHeartbeatWithUsage(dataNodes[0], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, (HdfsServerConstants.MIN_BLOCKS_FOR_WRITE-1)*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); // no space DatanodeStorageInfo[] targets; targets = chooseTarget(0); assertEquals(targets.length, 0); targets = chooseTarget(1); assertEquals(targets.length, 1); assertEquals(storages[1], targets[0]); targets = chooseTarget(2); assertEquals(targets.length, 2); assertEquals(storages[1], targets[0]); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3); assertEquals(targets.length, 3); assertEquals(storages[1], targets[0]); assertTrue(isOnSameRack(targets[1], targets[2])); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(4); assertEquals(targets.length, 4); assertEquals(storages[1], targets[0]); assertTrue(cluster.isNodeGroupAware()); verifyNoTwoTargetsOnSameNodeGroup(targets); assertTrue(isOnSameRack(targets[1], targets[2]) || isOnSameRack(targets[2], targets[3])); updateHeartbeatWithUsage(dataNodes[0], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } /** * In this testcase, client is dataNodes[0], but none of the nodes on rack 1 * is qualified to be chosen. So the 1st replica should be placed on either * rack 2 or rack 3. * the 2nd replica should be placed on a different rack, * the 3rd replica should be placed on the same rack as the 1st replica, but * in different node group. 
* @throws Exception */ @Test public void testChooseTarget4() throws Exception { // make data node 0-2 to be not qualified to choose: not enough disk space for(int i=0; i<3; i++) { updateHeartbeatWithUsage(dataNodes[i], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, (HdfsServerConstants.MIN_BLOCKS_FOR_WRITE-1)*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } DatanodeStorageInfo[] targets; targets = chooseTarget(0); assertEquals(targets.length, 0); targets = chooseTarget(1); assertEquals(targets.length, 1); assertFalse(isOnSameRack(dataNodes[0], targets[0])); targets = chooseTarget(2); assertEquals(targets.length, 2); assertFalse(isOnSameRack(dataNodes[0], targets[0])); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3); assertEquals(targets.length, 3); for(int i=0; i<3; i++) { assertFalse(isOnSameRack(dataNodes[0], targets[i])); } verifyNoTwoTargetsOnSameNodeGroup(targets); assertTrue(isOnSameRack(targets[0], targets[1]) || isOnSameRack(targets[1], targets[2])); assertFalse(isOnSameRack(targets[0], targets[2])); } /** * In this testcase, client is is a node outside of file system. * So the 1st replica can be placed on any node. * the 2nd replica should be placed on a different rack, * the 3rd replica should be placed on the same rack as the 2nd replica, * @throws Exception */ @Test public void testChooseTarget5() throws Exception { updateHeartbeatWithUsage(); DatanodeStorageInfo[] targets; targets = chooseTarget(0, NODE); assertEquals(targets.length, 0); targets = chooseTarget(1, NODE); assertEquals(targets.length, 1); targets = chooseTarget(2, NODE); assertEquals(targets.length, 2); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3, NODE); assertEquals(targets.length, 3); assertTrue(isOnSameRack(targets[1], targets[2])); assertFalse(isOnSameRack(targets[0], targets[1])); verifyNoTwoTargetsOnSameNodeGroup(targets); } /** * In this testcase, client is dataNodes[7], but it is not qualified * to be chosen. 
And there is no other node available on client Node group. * So the 1st replica should be placed on client local rack dataNodes[6] * @throws Exception */ @Test public void testChooseTargetForLocalStorage() throws Exception { updateHeartbeatWithUsage(dataNodes[7], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, (HdfsServerConstants.MIN_BLOCKS_FOR_WRITE-1)*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); // no space DatanodeStorageInfo[] targets; targets = chooseTarget(1, dataNodes[7]); assertEquals(targets.length, 1); assertTrue(targets[0].getDatanodeDescriptor().equals(dataNodes[6])); } /** * This testcase tests re-replication, when dataNodes[0] is already chosen. * So the 1st replica can be placed on random rack. * the 2nd replica should be placed on different node and nodegroup by same rack as * the 1st replica. The 3rd replica can be placed randomly. * @throws Exception */ @Test public void testRereplicate1() throws Exception { updateHeartbeatWithUsage(); List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); chosenNodes.add(storages[0]); DatanodeStorageInfo[] targets; targets = chooseTarget(0, chosenNodes); assertEquals(targets.length, 0); targets = chooseTarget(1, chosenNodes); assertEquals(targets.length, 1); assertFalse(isOnSameRack(dataNodes[0], targets[0])); targets = chooseTarget(2, chosenNodes); assertEquals(targets.length, 2); assertTrue(isOnSameRack(dataNodes[0], targets[0])); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3, chosenNodes); assertEquals(targets.length, 3); assertTrue(isOnSameRack(dataNodes[0], targets[0])); assertFalse(isOnSameNodeGroup(dataNodes[0], targets[0])); assertFalse(isOnSameRack(targets[0], targets[2])); } /** * This testcase tests re-replication, * when dataNodes[0] and dataNodes[1] are already chosen. * So the 1st replica should be placed on a different rack of rack 1. 
* the rest replicas can be placed randomly, * @throws Exception */ @Test public void testRereplicate2() throws Exception { updateHeartbeatWithUsage(); List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); chosenNodes.add(storages[0]); chosenNodes.add(storages[1]); DatanodeStorageInfo[] targets; targets = chooseTarget(0, chosenNodes); assertEquals(targets.length, 0); targets = chooseTarget(1, chosenNodes); assertEquals(targets.length, 1); assertFalse(isOnSameRack(dataNodes[0], targets[0])); targets = chooseTarget(2, chosenNodes); assertEquals(targets.length, 2); assertFalse(isOnSameRack(dataNodes[0], targets[0]) && isOnSameRack(dataNodes[0], targets[1])); } /** * This testcase tests re-replication, * when dataNodes[0] and dataNodes[3] are already chosen. * So the 1st replica should be placed on the rack that the writer resides. * the rest replicas can be placed randomly, * @throws Exception */ @Test public void testRereplicate3() throws Exception { updateHeartbeatWithUsage(); List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); chosenNodes.add(storages[0]); chosenNodes.add(storages[3]); DatanodeStorageInfo[] targets; targets = chooseTarget(0, chosenNodes); assertEquals(targets.length, 0); targets = chooseTarget(1, chosenNodes); assertEquals(targets.length, 1); assertTrue(isOnSameRack(dataNodes[0], targets[0])); assertFalse(isOnSameRack(dataNodes[3], targets[0])); targets = chooseTarget(1, dataNodes[3], chosenNodes); assertEquals(targets.length, 1); assertTrue(isOnSameRack(dataNodes[3], targets[0])); assertFalse(isOnSameNodeGroup(dataNodes[3], targets[0])); assertFalse(isOnSameRack(dataNodes[0], targets[0])); targets = chooseTarget(2, chosenNodes); assertEquals(targets.length, 2); assertTrue(isOnSameRack(dataNodes[0], targets[0])); assertFalse(isOnSameNodeGroup(dataNodes[0], targets[0])); targets = chooseTarget(2, dataNodes[3], chosenNodes); assertEquals(targets.length, 2); assertTrue(isOnSameRack(dataNodes[3], targets[0])); } /** * Test for the 
chooseReplicaToDelete are processed based on * block locality and free space */ @Test public void testChooseReplicaToDelete() throws Exception { List<DatanodeStorageInfo> replicaList = new ArrayList<>(); final Map<String, List<DatanodeStorageInfo>> rackMap = new HashMap<>(); storages[0].setRemainingForTests(4*1024*1024); dataNodes[0].setRemaining(calculateRemaining(dataNodes[0])); replicaList.add(storages[0]); storages[1].setRemainingForTests(3*1024*1024); dataNodes[1].setRemaining(calculateRemaining(dataNodes[1])); replicaList.add(storages[1]); storages[2].setRemainingForTests(2*1024*1024); dataNodes[2].setRemaining(calculateRemaining(dataNodes[2])); replicaList.add(storages[2]); storages[4].setRemainingForTests(100 * 1024 * 1024); storages[5].setRemainingForTests(512 * 1024); dataNodes[5].setRemaining(calculateRemaining(dataNodes[5])); replicaList.add(storages[5]); List<DatanodeStorageInfo> first = new ArrayList<>(); List<DatanodeStorageInfo> second = new ArrayList<>(); replicator.splitNodesWithRack(replicaList, replicaList, rackMap, first, second); assertEquals(3, first.size()); assertEquals(1, second.size()); List<StorageType> excessTypes = new ArrayList<>(); excessTypes.add(StorageType.DEFAULT); DatanodeStorageInfo chosen = ((BlockPlacementPolicyDefault) replicator) .chooseReplicaToDelete(first, second, excessTypes, rackMap); // Within first set {dataNodes[0], dataNodes[1], dataNodes[2]}, // dataNodes[0] and dataNodes[1] are in the same nodegroup, // but dataNodes[1] is chosen as less free space assertEquals(chosen, storages[1]); replicator.adjustSetsWithChosenReplica(rackMap, first, second, chosen); assertEquals(2, first.size()); assertEquals(1, second.size()); // Within first set {dataNodes[0], dataNodes[2]}, dataNodes[2] is chosen // as less free space excessTypes.add(StorageType.DEFAULT); chosen = ((BlockPlacementPolicyDefault) replicator).chooseReplicaToDelete( first, second, excessTypes, rackMap); assertEquals(chosen, storages[2]); 
replicator.adjustSetsWithChosenReplica(rackMap, first, second, chosen); assertEquals(0, first.size()); assertEquals(2, second.size()); // Within second set, dataNodes[5] with less free space excessTypes.add(StorageType.DEFAULT); chosen = ((BlockPlacementPolicyDefault) replicator).chooseReplicaToDelete( first, second, excessTypes, rackMap); assertEquals(chosen, storages[5]); } private long calculateRemaining(DatanodeDescriptor dataNode) { long sum = 0; for (DatanodeStorageInfo storageInfo: dataNode.getStorageInfos()){ sum += storageInfo.getRemaining(); } return sum; } /** * Test replica placement policy in case of boundary topology. * Rack 2 has only 1 node group & can't be placed with two replicas * The 1st replica will be placed on writer. * The 2nd replica should be placed on a different rack * The 3rd replica should be placed on the same rack with writer, but on a * different node group. */ @Test public void testChooseTargetsOnBoundaryTopology() throws Exception { for(int i=0; i<dataNodes.length; i++) { cluster.remove(dataNodes[i]); } for(int i=0; i<dataNodesInBoundaryCase.length; i++) { cluster.add(dataNodesInBoundaryCase[i]); } for(int i=0; i<dataNodesInBoundaryCase.length; i++) { updateHeartbeatWithUsage(dataNodesInBoundaryCase[i], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } DatanodeStorageInfo[] targets; targets = chooseTarget(0, dataNodesInBoundaryCase[0]); assertEquals(targets.length, 0); targets = chooseTarget(1, dataNodesInBoundaryCase[0]); assertEquals(targets.length, 1); targets = chooseTarget(2, dataNodesInBoundaryCase[0]); assertEquals(targets.length, 2); assertFalse(isOnSameRack(targets[0], targets[1])); targets = chooseTarget(3, dataNodesInBoundaryCase[0]); assertEquals(targets.length, 3); assertTrue(checkTargetsOnDifferentNodeGroup(targets)); } /** * Test re-replication policy in boundary case. 
* Rack 2 has only one node group & the node in this node group is chosen * Rack 1 has two nodegroups & one of them is chosen. * Replica policy should choose the node from node group of Rack1 but not the * same nodegroup with chosen nodes. */ @Test public void testRereplicateOnBoundaryTopology() throws Exception { for(int i=0; i<dataNodesInBoundaryCase.length; i++) { updateHeartbeatWithUsage(dataNodesInBoundaryCase[i], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); chosenNodes.add(storagesInBoundaryCase[0]); chosenNodes.add(storagesInBoundaryCase[5]); DatanodeStorageInfo[] targets; targets = chooseTarget(1, dataNodesInBoundaryCase[0], chosenNodes); assertFalse(isOnSameNodeGroup(dataNodesInBoundaryCase[0], targets[0])); assertFalse(isOnSameNodeGroup(dataNodesInBoundaryCase[5], targets[0])); assertTrue(checkTargetsOnDifferentNodeGroup(targets)); } /** * Test replica placement policy in case of targets more than number of * NodeGroups. * The 12-nodes cluster only has 6 NodeGroups, but in some cases, like: * placing submitted job file, there is requirement to choose more (10) * targets for placing replica. We should test it can return 6 targets. 
*/ @Test public void testChooseMoreTargetsThanNodeGroups() throws Exception { for(int i=0; i<dataNodes.length; i++) { cluster.remove(dataNodes[i]); } for(int i=0; i<dataNodesInBoundaryCase.length; i++) { DatanodeDescriptor node = dataNodesInBoundaryCase[i]; if (cluster.contains(node)) { cluster.remove(node); } } for(int i=0; i<dataNodesInMoreTargetsCase.length; i++) { cluster.add(dataNodesInMoreTargetsCase[i]); } for(int i=0; i<dataNodesInMoreTargetsCase.length; i++) { updateHeartbeatWithUsage(dataNodesInMoreTargetsCase[i], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } DatanodeStorageInfo[] targets; // Test normal case -- 3 replicas targets = chooseTarget(3, dataNodesInMoreTargetsCase[0]); assertEquals(targets.length, 3); assertTrue(checkTargetsOnDifferentNodeGroup(targets)); // Test special case -- replica number over node groups. targets = chooseTarget(10, dataNodesInMoreTargetsCase[0]); assertTrue(checkTargetsOnDifferentNodeGroup(targets)); // Verify it only can find 6 targets for placing replicas. 
assertEquals(targets.length, 6); } @Test public void testChooseTargetWithDependencies() throws Exception { for(int i=0; i<dataNodes.length; i++) { cluster.remove(dataNodes[i]); } for(int i=0; i<dataNodesInMoreTargetsCase.length; i++) { DatanodeDescriptor node = dataNodesInMoreTargetsCase[i]; if (cluster.contains(node)) { cluster.remove(node); } } Host2NodesMap host2DatanodeMap = namenode.getNamesystem() .getBlockManager() .getDatanodeManager().getHost2DatanodeMap(); for(int i=0; i<dataNodesForDependencies.length; i++) { cluster.add(dataNodesForDependencies[i]); host2DatanodeMap.add(dataNodesForDependencies[i]); } //add dependencies (node1 <-> node2, and node3<->node4) dataNodesForDependencies[1].addDependentHostName( dataNodesForDependencies[2].getHostName()); dataNodesForDependencies[2].addDependentHostName( dataNodesForDependencies[1].getHostName()); dataNodesForDependencies[3].addDependentHostName( dataNodesForDependencies[4].getHostName()); dataNodesForDependencies[4].addDependentHostName( dataNodesForDependencies[3].getHostName()); //Update heartbeat for(int i=0; i<dataNodesForDependencies.length; i++) { updateHeartbeatWithUsage(dataNodesForDependencies[i], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); } List<DatanodeStorageInfo> chosenNodes = new ArrayList<>(); DatanodeStorageInfo[] targets; Set<Node> excludedNodes = new HashSet<>(); excludedNodes.add(dataNodesForDependencies[5]); //try to select three targets as there are three node groups targets = chooseTarget(3, dataNodesForDependencies[1], chosenNodes, excludedNodes); //Even there are three node groups, verify that //only two targets are selected due to dependencies assertEquals(targets.length, 2); assertEquals(targets[0], storagesForDependencies[1]); assertTrue(targets[1].equals(storagesForDependencies[3]) || targets[1].equals(storagesForDependencies[4])); //verify that all data nodes are in the excluded list 
assertEquals(excludedNodes.size(), dataNodesForDependencies.length); for(int i=0; i<dataNodesForDependencies.length; i++) { assertTrue(excludedNodes.contains(dataNodesForDependencies[i])); } } /** * In this testcase, favored node is dataNodes[6]. * 1st replica should be placed on favored node. * @throws Exception */ @Test public void testChooseTargetAsFavouredNodes() throws Exception { DatanodeStorageInfo[] targets; List<DatanodeDescriptor> favoredNodes = new ArrayList<DatanodeDescriptor>(); favoredNodes.add(dataNodes[6]); favoredNodes.add(dataNodes[0]); favoredNodes.add(dataNodes[1]); targets = chooseTarget(1, dataNodes[7], null, favoredNodes); assertEquals(targets.length, 1); assertTrue(favoredNodes.contains(targets[0].getDatanodeDescriptor())); } /** * In this testcase, passed 2 favored nodes * dataNodes[0](Good Node), dataNodes[3](Bad node). * 1st replica should be placed on good favored node dataNodes[0]. * 2nd replica should be on bad favored node's nodegroup dataNodes[4]. * @throws Exception */ @Test public void testChooseFavoredNodesNodeGroup() throws Exception { updateHeartbeatWithUsage(dataNodes[3], 2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, (HdfsServerConstants.MIN_BLOCKS_FOR_WRITE-1)*BLOCK_SIZE, 0L, 0L, 0L, 0, 0); // no space DatanodeStorageInfo[] targets; List<DatanodeDescriptor> expectedTargets = new ArrayList<DatanodeDescriptor>(); expectedTargets.add(dataNodes[0]); expectedTargets.add(dataNodes[4]); List<DatanodeDescriptor> favouredNodes = new ArrayList<DatanodeDescriptor>(); favouredNodes.add(dataNodes[3]); favouredNodes.add(dataNodes[0]); targets = chooseTarget(2, dataNodes[7], null, favouredNodes); assertTrue(expectedTargets.contains(targets[0].getDatanodeDescriptor()), "1st Replica is incorrect"); assertTrue(expectedTargets.contains(targets[1].getDatanodeDescriptor()), "2nd Replica is incorrect"); } /** * In this testcase, passed 3 favored nodes * dataNodes[0],dataNodes[1],dataNodes[2] * * Favored nodes on different nodegroup 
should be selected. Remaining replica * should go through BlockPlacementPolicy. * * @throws Exception */ @Test public void testChooseRemainingReplicasApartFromFavoredNodes() throws Exception { DatanodeStorageInfo[] targets; List<DatanodeDescriptor> expectedTargets = new ArrayList<DatanodeDescriptor>(); expectedTargets.add(dataNodes[0]); expectedTargets.add(dataNodes[2]); expectedTargets.add(dataNodes[3]); expectedTargets.add(dataNodes[6]); expectedTargets.add(dataNodes[7]); List<DatanodeDescriptor> favouredNodes = new ArrayList<DatanodeDescriptor>(); favouredNodes.add(dataNodes[0]); favouredNodes.add(dataNodes[1]); favouredNodes.add(dataNodes[2]); targets = chooseTarget(3, dataNodes[3], null, favouredNodes); for (int i = 0; i < targets.length; i++) { assertTrue(expectedTargets.contains(targets[i].getDatanodeDescriptor()), "Target should be a part of Expected Targets"); } } }
apache/juneau
33,135
juneau-core/juneau-marshall/src/main/java/org/apache/juneau/html/HtmlSerializerSession.java
// *************************************************************************************************************************** // * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file * // * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * // * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance * // * with the License. You may obtain a copy of the License at * // * * // * http://www.apache.org/licenses/LICENSE-2.0 * // * * // * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an * // * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * // * specific language governing permissions and limitations under the License. * // *************************************************************************************************************************** package org.apache.juneau.html; import static org.apache.juneau.common.utils.IOUtils.*; import static org.apache.juneau.common.utils.StringUtils.*; import static org.apache.juneau.common.utils.Utils.*; import static org.apache.juneau.xml.XmlSerializerSession.ContentResult.*; import java.io.*; import java.lang.reflect.*; import java.nio.charset.*; import java.util.*; import java.util.function.*; import java.util.regex.*; import org.apache.juneau.*; import org.apache.juneau.common.utils.*; import org.apache.juneau.html.annotation.*; import org.apache.juneau.httppart.*; import org.apache.juneau.internal.*; import org.apache.juneau.serializer.*; import org.apache.juneau.svl.*; import org.apache.juneau.swap.*; import org.apache.juneau.xml.*; import org.apache.juneau.xml.annotation.*; /** * Session object that lives for the duration of a single use of {@link HtmlSerializer}. 
* * <h5 class='section'>Notes:</h5><ul> * <li class='warn'>This class is not thread safe and is typically discarded after one use. * </ul> * * <h5 class='section'>See Also:</h5><ul> * <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/HtmlBasics">HTML Basics</a> * </ul> */ public class HtmlSerializerSession extends XmlSerializerSession { //----------------------------------------------------------------------------------------------------------------- // Static //----------------------------------------------------------------------------------------------------------------- /** * Creates a new builder for this object. * * @param ctx The context creating this session. * @return A new builder. */ public static Builder create(HtmlSerializer ctx) { return new Builder(ctx); } //----------------------------------------------------------------------------------------------------------------- // Builder //----------------------------------------------------------------------------------------------------------------- /** * Builder class. */ @FluentSetters public static class Builder extends XmlSerializerSession.Builder { HtmlSerializer ctx; /** * Constructor * * @param ctx The context creating this session. 
*/ protected Builder(HtmlSerializer ctx) { super(ctx); this.ctx = ctx; } @Override public HtmlSerializerSession build() { return new HtmlSerializerSession(this); } // <FluentSetters> @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public <T> Builder apply(Class<T> type, Consumer<T> apply) { super.apply(type, apply); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder debug(Boolean value) { super.debug(value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder properties(Map<String,Object> value) { super.properties(value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder property(String key, Object value) { super.property(key, value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder unmodifiable() { super.unmodifiable(); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder locale(Locale value) { super.locale(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder localeDefault(Locale value) { super.localeDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder mediaType(MediaType value) { super.mediaType(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder mediaTypeDefault(MediaType value) { super.mediaTypeDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder timeZone(TimeZone value) { super.timeZone(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder timeZoneDefault(TimeZone value) { super.timeZoneDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.SerializerSession.Builder */ public Builder javaMethod(Method value) { 
super.javaMethod(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.SerializerSession.Builder */ public Builder resolver(VarResolverSession value) { super.resolver(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.SerializerSession.Builder */ public Builder schema(HttpPartSchema value) { super.schema(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.SerializerSession.Builder */ public Builder schemaDefault(HttpPartSchema value) { super.schemaDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.SerializerSession.Builder */ public Builder uriContext(UriContext value) { super.uriContext(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.WriterSerializerSession.Builder */ public Builder fileCharset(Charset value) { super.fileCharset(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.WriterSerializerSession.Builder */ public Builder streamCharset(Charset value) { super.streamCharset(value); return this; } @Override /* GENERATED - org.apache.juneau.serializer.WriterSerializerSession.Builder */ public Builder useWhitespace(Boolean value) { super.useWhitespace(value); return this; } // </FluentSetters> } //----------------------------------------------------------------------------------------------------------------- // Instance //----------------------------------------------------------------------------------------------------------------- private final HtmlSerializer ctx; private final Pattern urlPattern = Pattern.compile("http[s]?\\:\\/\\/.*"); private final Pattern labelPattern; /** * Constructor. * * @param builder The builder for this object. 
*/ protected HtmlSerializerSession(Builder builder) { super(builder); ctx = builder.ctx; labelPattern = Pattern.compile("[\\?\\&]" + Pattern.quote(ctx.getLabelParameter()) + "=([^\\&]*)"); } /** * Converts the specified output target object to an {@link HtmlWriter}. * * @param out The output target object. * @return The output target object wrapped in an {@link HtmlWriter}. * @throws IOException Thrown by underlying stream. */ protected final HtmlWriter getHtmlWriter(SerializerPipe out) throws IOException { Object output = out.getRawOutput(); if (output instanceof HtmlWriter) return (HtmlWriter)output; HtmlWriter w = new HtmlWriter(out.getWriter(), isUseWhitespace(), getMaxIndent(), isTrimStrings(), getQuoteChar(), getUriResolver()); out.setWriter(w); return w; } /** * Returns <jk>true</jk> if the specified object is a URL. * * @param cm The ClassMeta of the object being serialized. * @param pMeta * The property metadata of the bean property of the object. * Can be <jk>null</jk> if the object isn't from a bean property. * @param o The object. * @return <jk>true</jk> if the specified object is a URL. */ public boolean isUri(ClassMeta<?> cm, BeanPropertyMeta pMeta, Object o) { if (cm.isUri() || (pMeta != null && pMeta.isUri())) return true; if (isDetectLinksInStrings() && o instanceof CharSequence && urlPattern.matcher(o.toString()).matches()) return true; return false; } /** * Returns the anchor text to use for the specified URL object. * * @param pMeta * The property metadata of the bean property of the object. * Can be <jk>null</jk> if the object isn't from a bean property. * @param o The URL object. * @return The anchor text to use for the specified URL object. 
*/ public String getAnchorText(BeanPropertyMeta pMeta, Object o) { String s = o.toString(); if (isDetectLabelParameters()) { Matcher m = labelPattern.matcher(s); if (m.find()) return urlDecode(m.group(1)); } switch (getUriAnchorText()) { case LAST_TOKEN: s = resolveUri(s); if (s.indexOf('/') != -1) s = s.substring(s.lastIndexOf('/')+1); if (s.indexOf('?') != -1) s = s.substring(0, s.indexOf('?')); if (s.indexOf('#') != -1) s = s.substring(0, s.indexOf('#')); if (s.isEmpty()) s = "/"; return urlDecode(s); case URI_ANCHOR: if (s.indexOf('#') != -1) s = s.substring(s.lastIndexOf('#')+1); return urlDecode(s); case PROPERTY_NAME: return pMeta == null ? s : pMeta.getName(); case URI: return resolveUri(s); case CONTEXT_RELATIVE: return relativizeUri("context:/", s); case SERVLET_RELATIVE: return relativizeUri("servlet:/", s); case PATH_RELATIVE: return relativizeUri("request:/", s); default /* TO_STRING */: return s; } } @Override /* XmlSerializer */ public boolean isHtmlMode() { return true; } @Override /* Serializer */ protected void doSerialize(SerializerPipe out, Object o) throws IOException, SerializeException { doSerialize(o, getHtmlWriter(out)); } /** * Main serialization routine. * * @param session The serialization context object. * @param o The object being serialized. * @param w The writer to serialize to. * @return The same writer passed in. * @throws IOException If a problem occurred trying to send output to the writer. 
*/ private XmlWriter doSerialize(Object o, XmlWriter w) throws IOException, SerializeException { serializeAnything(w, o, getExpectedRootType(o), null, null, getInitialDepth()-1, true, false); return w; } @SuppressWarnings({ "rawtypes" }) @Override /* XmlSerializerSession */ protected ContentResult serializeAnything( XmlWriter out, Object o, ClassMeta<?> eType, String keyName, String elementName, Namespace elementNamespace, boolean addNamespaceUris, XmlFormat format, boolean isMixed, boolean preserveWhitespace, BeanPropertyMeta pMeta) throws SerializeException { // If this is a bean, then we want to serialize it as HTML unless it's @Html(format=XML). ClassMeta<?> type = push2(elementName, o, eType); pop(); if (type == null) type = object(); else if (type.isDelegate()) type = ((Delegate)o).getClassMeta(); ObjectSwap swap = type.getSwap(this); if (swap != null) { o = swap(swap, o); type = swap.getSwapClassMeta(this); if (type.isObject()) type = getClassMetaForObject(o); } HtmlClassMeta cHtml = getHtmlClassMeta(type); if (type.isMapOrBean() && ! cHtml.isXml()) return serializeAnything(out, o, eType, elementName, pMeta, 0, false, false); return super.serializeAnything(out, o, eType, keyName, elementName, elementNamespace, addNamespaceUris, format, isMixed, preserveWhitespace, pMeta); } /** * Serialize the specified object to the specified writer. * * @param out The writer. * @param o The object to serialize. * @param eType The expected type of the object if this is a bean property. * @param name * The attribute name of this object if this object was a field in a JSON object (i.e. key of a * {@link java.util.Map.Entry} or property name of a bean). * @param pMeta The bean property being serialized, or <jk>null</jk> if we're not serializing a bean property. * @param xIndent The current indentation value. * @param isRoot <jk>true</jk> if this is the root element of the document. 
* @param nlIfElement <jk>true</jk> if we should add a newline to the output before serializing only if the object is an element and not text. * @return The type of content encountered. Either simple (no whitespace) or normal (elements with whitespace). * @throws SerializeException Generic serialization error occurred. */ @SuppressWarnings({ "rawtypes", "unchecked" }) protected ContentResult serializeAnything(XmlWriter out, Object o, ClassMeta<?> eType, String name, BeanPropertyMeta pMeta, int xIndent, boolean isRoot, boolean nlIfElement) throws SerializeException { ClassMeta<?> aType = null; // The actual type ClassMeta<?> wType = null; // The wrapped type (delegate) ClassMeta<?> sType = object(); // The serialized type if (eType == null) eType = object(); aType = push2(name, o, eType); // Handle recursion if (aType == null) { o = null; aType = object(); } // Handle Optional<X> if (isOptional(aType)) { o = getOptionalValue(o); eType = getOptionalType(eType); aType = getClassMetaForObject(o, object()); } indent += xIndent; ContentResult cr = CR_ELEMENTS; // Determine the type. if (o == null || (aType.isChar() && ((Character)o).charValue() == 0)) { out.tag("null"); cr = ContentResult.CR_MIXED; } else { if (aType.isDelegate()) { wType = aType; aType = ((Delegate)o).getClassMeta(); } sType = aType; String typeName = null; if (isAddBeanTypes() && ! eType.equals(aType)) typeName = aType.getDictionaryName(); // Swap if necessary ObjectSwap swap = aType.getSwap(this); if (swap != null) { o = swap(swap, o); sType = swap.getSwapClassMeta(this); // If the getSwapClass() method returns Object, we need to figure out // the actual type now. if (sType.isObject()) sType = getClassMetaForObject(o); } // Handle the case where we're serializing a raw stream. 
if (sType.isReader() || sType.isInputStream()) { pop(); indent -= xIndent; if (sType.isReader()) pipe((Reader)o, out, SerializerSession::handleThrown); else pipe((InputStream)o, out, SerializerSession::handleThrown); return ContentResult.CR_MIXED; } HtmlClassMeta cHtml = getHtmlClassMeta(sType); HtmlBeanPropertyMeta bpHtml = getHtmlBeanPropertyMeta(pMeta); HtmlRender render = Utils.firstNonNull(bpHtml.getRender(), cHtml.getRender()); if (render != null) { Object o2 = render.getContent(this, o); if (o2 != o) { indent -= xIndent; pop(); out.nl(indent); return serializeAnything(out, o2, null, typeName, null, xIndent, false, false); } } if (cHtml.isXml() || bpHtml.isXml()) { pop(); indent++; if (nlIfElement) out.nl(0); super.serializeAnything(out, o, null, null, null, null, false, XmlFormat.MIXED, false, false, null); indent -= xIndent+1; return cr; } else if (cHtml.isPlainText() || bpHtml.isPlainText()) { out.w(o == null ? "null" : o.toString()); cr = CR_MIXED; } else if (o == null || (sType.isChar() && ((Character)o).charValue() == 0)) { out.tag("null"); cr = CR_MIXED; } else if (sType.isNumber()) { if (eType.isNumber() && ! isRoot) out.append(o); else out.sTag("number").append(o).eTag("number"); cr = CR_MIXED; } else if (sType.isBoolean()) { if (eType.isBoolean() && ! isRoot) out.append(o); else out.sTag("boolean").append(o).eTag("boolean"); cr = CR_MIXED; } else if (sType.isMap() || (wType != null && wType.isMap())) { out.nlIf(! 
isRoot, xIndent+1); if (o instanceof BeanMap) serializeBeanMap(out, (BeanMap)o, eType, pMeta); else serializeMap(out, (Map)o, sType, eType.getKeyType(), eType.getValueType(), typeName, pMeta); } else if (sType.isBean()) { BeanMap m = toBeanMap(o); if (aType.hasAnnotation(HtmlLink.class)) { Value<String> uriProperty = Value.empty(), nameProperty = Value.empty(); aType.forEachAnnotation(HtmlLink.class, x -> isNotEmpty(x.uriProperty()), x -> uriProperty.set(x.uriProperty())); aType.forEachAnnotation(HtmlLink.class, x -> isNotEmpty(x.nameProperty()), x -> nameProperty.set(x.nameProperty())); Object urlProp = m.get(uriProperty.orElse("")); Object nameProp = m.get(nameProperty.orElse("")); out.oTag("a").attrUri("href", urlProp).w('>').text(nameProp).eTag("a"); cr = CR_MIXED; } else { out.nlIf(! isRoot, xIndent+2); serializeBeanMap(out, m, eType, pMeta); } } else if (sType.isCollection() || sType.isArray() || (wType != null && wType.isCollection())) { out.nlIf(! isRoot, xIndent+1); serializeCollection(out, o, sType, eType, name, pMeta); } else if (isUri(sType, pMeta, o)) { String label = getAnchorText(pMeta, o); out.oTag("a").attrUri("href", o).w('>'); out.text(label); out.eTag("a"); cr = CR_MIXED; } else { if (isRoot) out.sTag("string").text(toString(o)).eTag("string"); else out.text(toString(o)); cr = CR_MIXED; } } pop(); indent -= xIndent; return cr; } @SuppressWarnings({ "rawtypes", "unchecked" }) private void serializeMap(XmlWriter out, Map m, ClassMeta<?> sType, ClassMeta<?> eKeyType, ClassMeta<?> eValueType, String typeName, BeanPropertyMeta ppMeta) throws SerializeException { ClassMeta<?> keyType = eKeyType == null ? string() : eKeyType; ClassMeta<?> valueType = eValueType == null ? 
object() : eValueType; ClassMeta<?> aType = getClassMetaForObject(m); // The actual type HtmlClassMeta cHtml = getHtmlClassMeta(aType); HtmlBeanPropertyMeta bpHtml = getHtmlBeanPropertyMeta(ppMeta); int i = indent; out.oTag(i, "table"); if (typeName != null && ppMeta != null && ppMeta.getClassMeta() != aType) out.attr(getBeanTypePropertyName(sType), typeName); out.append(">").nl(i+1); if (isAddKeyValueTableHeaders() && ! (cHtml.isNoTableHeaders() || bpHtml.isNoTableHeaders())) { out.sTag(i+1, "tr").nl(i+2); out.sTag(i+2, "th").append("key").eTag("th").nl(i+3); out.sTag(i+2, "th").append("value").eTag("th").nl(i+3); out.ie(i+1).eTag("tr").nl(i+2); } forEachEntry(m, x -> serializeMapEntry(out, x, keyType, valueType, i, ppMeta)); out.ie(i).eTag("table").nl(i); } @SuppressWarnings("rawtypes") private void serializeMapEntry(XmlWriter out, Map.Entry e, ClassMeta<?> keyType, ClassMeta<?> valueType, int i, BeanPropertyMeta ppMeta) throws SerializeException { Object key = generalize(e.getKey(), keyType); Object value = null; try { value = e.getValue(); } catch (StackOverflowError t) { throw t; } catch (Throwable t) { onError(t, "Could not call getValue() on property ''{0}'', {1}", e.getKey(), t.getLocalizedMessage()); } String link = getLink(ppMeta); String style = getStyle(this, ppMeta, value); out.sTag(i+1, "tr").nl(i+2); out.oTag(i+2, "td"); if (style != null) out.attr("style", style); out.cTag(); if (link != null) out.oTag(i+3, "a").attrUri("href", link.replace("{#}", Utils.s(value))).cTag(); ContentResult cr = serializeAnything(out, key, keyType, null, null, 2, false, false); if (link != null) out.eTag("a"); if (cr == CR_ELEMENTS) out.i(i+2); out.eTag("td").nl(i+2); out.sTag(i+2, "td"); cr = serializeAnything(out, value, valueType, (key == null ? 
"_x0000_" : toString(key)), null, 2, false, true); if (cr == CR_ELEMENTS) out.ie(i+2); out.eTag("td").nl(i+2); out.ie(i+1).eTag("tr").nl(i+1); } private void serializeBeanMap(XmlWriter out, BeanMap<?> m, ClassMeta<?> eType, BeanPropertyMeta ppMeta) throws SerializeException { HtmlClassMeta cHtml = getHtmlClassMeta(m.getClassMeta()); HtmlBeanPropertyMeta bpHtml = getHtmlBeanPropertyMeta(ppMeta); int i = indent; out.oTag(i, "table"); String typeName = m.getMeta().getDictionaryName(); if (typeName != null && eType != m.getClassMeta()) out.attr(getBeanTypePropertyName(m.getClassMeta()), typeName); out.w('>').nl(i); if (isAddKeyValueTableHeaders() && ! (cHtml.isNoTableHeaders() || bpHtml.isNoTableHeaders())) { out.sTag(i+1, "tr").nl(i+1); out.sTag(i+2, "th").append("key").eTag("th").nl(i+2); out.sTag(i+2, "th").append("value").eTag("th").nl(i+2); out.ie(i+1).eTag("tr").nl(i+1); } Predicate<Object> checkNull = x -> isKeepNullProperties() || x != null; m.forEachValue(checkNull, (pMeta,key,value,thrown) -> { ClassMeta<?> cMeta = pMeta.getClassMeta(); if (thrown != null) onBeanGetterException(pMeta, thrown); if (canIgnoreValue(cMeta, key, value)) return; String link = null, anchorText = null; if (! 
cMeta.isCollectionOrArray()) { link = m.resolveVars(getLink(pMeta)); anchorText = m.resolveVars(getAnchorText(pMeta)); } if (anchorText != null) value = anchorText; out.sTag(i+1, "tr").nl(i+1); out.sTag(i+2, "td").text(key).eTag("td").nl(i+2); out.oTag(i+2, "td"); String style = getStyle(this, pMeta, value); if (style != null) out.attr("style", style); out.cTag(); try { if (link != null) out.oTag(i+3, "a").attrUri("href", link).cTag(); ContentResult cr = serializeAnything(out, value, cMeta, key, pMeta, 2, false, true); if (cr == CR_ELEMENTS) out.i(i+2); if (link != null) out.eTag("a"); } catch (SerializeException | Error e) { throw e; } catch (Throwable e) { onBeanGetterException(pMeta, e); } out.eTag("td").nl(i+2); out.ie(i+1).eTag("tr").nl(i+1); }); out.ie(i).eTag("table").nl(i); } @SuppressWarnings({ "rawtypes", "unchecked" }) private void serializeCollection(XmlWriter out, Object in, ClassMeta<?> sType, ClassMeta<?> eType, String name, BeanPropertyMeta ppMeta) throws SerializeException { HtmlClassMeta cHtml = getHtmlClassMeta(sType); HtmlBeanPropertyMeta bpHtml = getHtmlBeanPropertyMeta(ppMeta); Collection c = (sType.isCollection() ? (Collection)in : toList(sType.getInnerClass(), in)); boolean isCdc = cHtml.isHtmlCdc() || bpHtml.isHtmlCdc(); boolean isSdc = cHtml.isHtmlSdc() || bpHtml.isHtmlSdc(); boolean isDc = isCdc || isSdc; int i = indent; if (c.isEmpty()) { out.appendln(i, "<ul></ul>"); return; } String type2 = null; if (sType != eType) type2 = sType.getDictionaryName(); if (type2 == null) type2 = "array"; c = sort(c); String btpn = getBeanTypePropertyName(eType); // Look at the objects to see how we're going to handle them. Check the first object to see how we're going to // handle this. // If it's a map or bean, then we'll create a table. // Otherwise, we'll create a list. 
Object[] th = getTableHeaders(c, bpHtml); if (th != null) { out.oTag(i, "table").attr(btpn, type2).w('>').nl(i+1); if (th.length > 0) { out.sTag(i+1, "tr").nl(i+2); for (Object key : th) { out.sTag(i+2, "th"); out.text(convertToType(key, String.class)); out.eTag("th").nl(i+2); } out.ie(i+1).eTag("tr").nl(i+1); } else { th = null; } for (Object o : c) { ClassMeta<?> cm = getClassMetaForObject(o); if (cm != null && cm.getSwap(this) != null) { ObjectSwap swap = cm.getSwap(this); o = swap(swap, o); cm = swap.getSwapClassMeta(this); } out.oTag(i+1, "tr"); String typeName = (cm == null ? null : cm.getDictionaryName()); String typeProperty = getBeanTypePropertyName(cm); if (typeName != null && eType.getElementType() != cm) out.attr(typeProperty, typeName); out.cTag().nl(i+2); if (cm == null) { out.i(i+2); serializeAnything(out, o, null, null, null, 1, false, false); out.nl(0); } else if (cm.isMap() && ! (cm.isBeanMap())) { Map m2 = sort((Map)o); if (th == null) th = m2.keySet().toArray(new Object[m2.size()]); for (Object k : th) { out.sTag(i+2, "td"); ContentResult cr = serializeAnything(out, m2.get(k), eType.getElementType(), toString(k), null, 2, false, true); if (cr == CR_ELEMENTS) out.i(i+2); out.eTag("td").nl(i+2); } } else { BeanMap m2 = toBeanMap(o); if (th == null) th = m2.keySet().toArray(new Object[m2.size()]); for (Object k : th) { BeanMapEntry p = m2.getProperty(toString(k)); BeanPropertyMeta pMeta = p.getMeta(); if (pMeta.canRead()) { Object value = p.getValue(); String link = null, anchorText = null; if (! 
pMeta.getClassMeta().isCollectionOrArray()) { link = m2.resolveVars(getLink(pMeta)); anchorText = m2.resolveVars(getAnchorText(pMeta)); } if (anchorText != null) value = anchorText; String style = getStyle(this, pMeta, value); out.oTag(i+2, "td"); if (style != null) out.attr("style", style); out.cTag(); if (link != null) out.oTag("a").attrUri("href", link).cTag(); ContentResult cr = serializeAnything(out, value, pMeta.getClassMeta(), p.getKey().toString(), pMeta, 2, false, true); if (cr == CR_ELEMENTS) out.i(i+2); if (link != null) out.eTag("a"); out.eTag("td").nl(i+2); } } } out.ie(i+1).eTag("tr").nl(i+1); } out.ie(i).eTag("table").nl(i); } else { out.oTag(i, isDc ? "p" : "ul"); if (! type2.equals("array")) out.attr(btpn, type2); out.w('>').nl(i+1); boolean isFirst = true; for (Object o : c) { if (isDc && ! isFirst) out.append(isCdc ? ", " : " "); if (! isDc) out.oTag(i+1, "li"); String style = getStyle(this, ppMeta, o); String link = getLink(ppMeta); if (style != null && ! isDc) out.attr("style", style); if (! isDc) out.cTag(); if (link != null) out.oTag(i+2, "a").attrUri("href", link.replace("{#}", Utils.s(o))).cTag(); ContentResult cr = serializeAnything(out, o, eType.getElementType(), name, null, 1, false, true); if (link != null) out.eTag("a"); if (cr == CR_ELEMENTS) out.ie(i+1); if (! isDc) out.eTag("li").nl(i+1); isFirst = false; } out.ie(i).eTag(isDc ? "p" : "ul").nl(i); } } private HtmlRender<?> getRender(HtmlSerializerSession session, BeanPropertyMeta pMeta, Object value) { if (pMeta == null) return null; HtmlRender<?> render = getHtmlBeanPropertyMeta(pMeta).getRender(); if (render != null) return render; ClassMeta<?> cMeta = session.getClassMetaForObject(value); render = cMeta == null ? 
null : getHtmlClassMeta(cMeta).getRender(); return render; } @SuppressWarnings({"rawtypes","unchecked"}) private String getStyle(HtmlSerializerSession session, BeanPropertyMeta pMeta, Object value) { HtmlRender render = getRender(session, pMeta, value); return render == null ? null : render.getStyle(session, value); } private String getLink(BeanPropertyMeta pMeta) { return pMeta == null ? null : getHtmlBeanPropertyMeta(pMeta).getLink(); } private String getAnchorText(BeanPropertyMeta pMeta) { return pMeta == null ? null : getHtmlBeanPropertyMeta(pMeta).getAnchorText(); } /* * Returns the table column headers for the specified collection of objects. * Returns null if collection should not be serialized as a 2-dimensional table. * Returns an empty array if it should be treated as a table but without headers. * 2-dimensional tables are used for collections of objects that all have the same set of property names. */ @SuppressWarnings({ "rawtypes", "unchecked" }) private Object[] getTableHeaders(Collection c, HtmlBeanPropertyMeta bpHtml) throws SerializeException { if (c.isEmpty()) return null; c = sort(c); Object o1 = null; for (Object o : c) if (o != null) { o1 = o; break; } if (o1 == null) return null; ClassMeta<?> cm1 = getClassMetaForObject(o1); ObjectSwap swap = cm1.getSwap(this); o1 = swap(swap, o1); if (swap != null) cm1 = swap.getSwapClassMeta(this); if (cm1 == null || ! cm1.isMapOrBean() || cm1.hasAnnotation(HtmlLink.class)) return null; HtmlClassMeta cHtml = getHtmlClassMeta(cm1); if (cHtml.isNoTables() || bpHtml.isNoTables() || cHtml.isXml() || bpHtml.isXml() || canIgnoreValue(cm1, null, o1)) return null; if (cHtml.isNoTableHeaders() || bpHtml.isNoTableHeaders()) return new Object[0]; // If it's a non-bean map, only use table if all entries are also maps. if (cm1.isMap() && ! cm1.isBeanMap()) { Set<Object> set = set(); for (Object o : c) { o = swap(swap, o); if (! canIgnoreValue(cm1, null, o)) { if (! 
cm1.isInstance(o)) return null; forEachEntry((Map)o, x -> set.add(x.getKey())); } } return set.toArray(new Object[set.size()]); } // Must be a bean or BeanMap. for (Object o : c) { o = swap(swap, o); if (! canIgnoreValue(cm1, null, o)) { if (! cm1.isInstance(o)) return null; } } BeanMap<?> bm = toBeanMap(o1); return bm.keySet().toArray(new String[bm.size()]); } //----------------------------------------------------------------------------------------------------------------- // Properties //----------------------------------------------------------------------------------------------------------------- /** * Add <js>"_type"</js> properties when needed. * * @see HtmlSerializer.Builder#addBeanTypesHtml() * @return * <jk>true</jk> if <js>"_type"</js> properties will be added to beans if their type cannot be inferred * through reflection. */ @Override protected final boolean isAddBeanTypes() { return ctx.isAddBeanTypes(); } /** * Add key/value headers on bean/map tables. * * @see HtmlSerializer.Builder#addKeyValueTableHeaders() * @return * <jk>true</jk> if <bc>key</bc> and <bc>value</bc> column headers are added to tables. */ protected final boolean isAddKeyValueTableHeaders() { return ctx.isAddKeyValueTableHeaders(); } /** * Look for link labels in URIs. * * @see HtmlSerializer.Builder#disableDetectLabelParameters() * @return * <jk>true</jk> if we should ook for URL label parameters (e.g. <js>"?label=foobar"</js>). */ protected final boolean isDetectLabelParameters() { return ctx.isDetectLabelParameters(); } /** * Look for URLs in {@link String Strings}. * * @see HtmlSerializer.Builder#disableDetectLinksInStrings() * @return * <jk>true</jk> if we should automatically convert strings to URLs if they look like a URL. */ protected final boolean isDetectLinksInStrings() { return ctx.isDetectLinksInStrings(); } /** * Link label parameter name. * * @see HtmlSerializer.Builder#labelParameter(String) * @return * The parameter name to look for when resolving link labels. 
*/ protected final String getLabelParameter() { return ctx.getLabelParameter(); } /** * Anchor text source. * * @see HtmlSerializer.Builder#uriAnchorText(AnchorText) * @return * When creating anchor tags (e.g. <code><xt>&lt;a</xt> <xa>href</xa>=<xs>'...'</xs> * <xt>&gt;</xt>text<xt>&lt;/a&gt;</xt></code>) in HTML, this setting defines what to set the inner text to. */ protected final AnchorText getUriAnchorText() { return ctx.getUriAnchorText(); } //----------------------------------------------------------------------------------------------------------------- // Extended metadata //----------------------------------------------------------------------------------------------------------------- /** * Returns the language-specific metadata on the specified class. * * @param cm The class to return the metadata on. * @return The metadata. */ protected HtmlClassMeta getHtmlClassMeta(ClassMeta<?> cm) { return ctx.getHtmlClassMeta(cm); } /** * Returns the language-specific metadata on the specified bean property. * * @param bpm The bean property to return the metadata on. * @return The metadata. */ protected HtmlBeanPropertyMeta getHtmlBeanPropertyMeta(BeanPropertyMeta bpm) { return ctx.getHtmlBeanPropertyMeta(bpm); } }
apache/ofbiz
35,128
applications/order/src/main/java/org/apache/ofbiz/order/order/OrderLookupServices.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.ofbiz.order.order; import java.math.BigDecimal; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.GeneralException; import org.apache.ofbiz.base.util.ObjectType; import org.apache.ofbiz.base.util.StringUtil; import org.apache.ofbiz.base.util.UtilGenerics; import org.apache.ofbiz.base.util.UtilMisc; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.base.util.collections.PagedList; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.condition.EntityComparisonOperator; import org.apache.ofbiz.entity.condition.EntityCondition; import org.apache.ofbiz.entity.condition.EntityConditionList; import org.apache.ofbiz.entity.condition.EntityExpr; import org.apache.ofbiz.entity.condition.EntityOperator; import org.apache.ofbiz.entity.model.DynamicViewEntity; import org.apache.ofbiz.entity.model.ModelKeyMap; import org.apache.ofbiz.entity.util.EntityQuery; 
import org.apache.ofbiz.security.Security; import org.apache.ofbiz.service.DispatchContext; import org.apache.ofbiz.service.GenericServiceException; import org.apache.ofbiz.service.LocalDispatcher; import org.apache.ofbiz.service.ServiceUtil; import org.apache.ofbiz.widget.renderer.Paginator; /** * OrderLookupServices */ public class OrderLookupServices { public static final String module = OrderLookupServices.class.getName(); public static Map<String, Object> findOrders(DispatchContext dctx, Map<String, ? extends Object> context) { LocalDispatcher dispatcher = dctx.getDispatcher(); Delegator delegator = dctx.getDelegator(); Security security = dctx.getSecurity(); GenericValue userLogin = (GenericValue) context.get("userLogin"); Integer viewIndex = Paginator.getViewIndex(context, "viewIndex", 1); Integer viewSize = Paginator.getViewSize(context, "viewSize"); String showAll = (String) context.get("showAll"); String useEntryDate = (String) context.get("useEntryDate"); Locale locale = (Locale) context.get("locale"); if (showAll == null) { showAll = "N"; } // list of fields to select (initial list) Set<String> fieldsToSelect = new LinkedHashSet<String>(); fieldsToSelect.add("orderId"); fieldsToSelect.add("orderName"); fieldsToSelect.add("statusId"); fieldsToSelect.add("orderTypeId"); fieldsToSelect.add("orderDate"); fieldsToSelect.add("currencyUom"); fieldsToSelect.add("grandTotal"); fieldsToSelect.add("remainingSubTotal"); // sorting by order date newest first List<String> orderBy = UtilMisc.toList("-orderDate", "-orderId"); // list to hold the parameters List<String> paramList = new LinkedList<String>(); // list of conditions List<EntityCondition> conditions = new LinkedList<EntityCondition>(); // check security flag for purchase orders boolean canViewPo = security.hasEntityPermission("ORDERMGR", "_PURCHASE_VIEW", userLogin); if (!canViewPo) { conditions.add(EntityCondition.makeCondition("orderTypeId", EntityOperator.NOT_EQUAL, "PURCHASE_ORDER")); } // dynamic view 
entity DynamicViewEntity dve = new DynamicViewEntity(); dve.addMemberEntity("OH", "OrderHeader"); dve.addAliasAll("OH", "", null); // no prefix dve.addRelation("one-nofk", "", "OrderType", UtilMisc.toList(new ModelKeyMap("orderTypeId", "orderTypeId"))); dve.addRelation("one-nofk", "", "StatusItem", UtilMisc.toList(new ModelKeyMap("statusId", "statusId"))); // start the lookup String orderId = (String) context.get("orderId"); if (UtilValidate.isNotEmpty(orderId)) { paramList.add("orderId=" + orderId); conditions.add(makeExpr("orderId", orderId)); } // the base order header fields List<String> orderTypeList = UtilGenerics.checkList(context.get("orderTypeId")); if (orderTypeList != null) { List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); for (String orderTypeId : orderTypeList) { paramList.add("orderTypeId=" + orderTypeId); if (!"PURCHASE_ORDER".equals(orderTypeId) || ("PURCHASE_ORDER".equals(orderTypeId) && canViewPo)) { orExprs.add(EntityCondition.makeCondition("orderTypeId", EntityOperator.EQUALS, orderTypeId)); } } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } String orderName = (String) context.get("orderName"); if (UtilValidate.isNotEmpty(orderName)) { paramList.add("orderName=" + orderName); conditions.add(makeExpr("orderName", orderName, true)); } List<String> orderStatusList = UtilGenerics.checkList(context.get("orderStatusId")); if (orderStatusList != null) { List<EntityCondition> orExprs = new LinkedList<EntityCondition>(); for (String orderStatusId : orderStatusList) { paramList.add("orderStatusId=" + orderStatusId); if ("PENDING".equals(orderStatusId)) { List<EntityExpr> pendExprs = new LinkedList<EntityExpr>(); pendExprs.add(EntityCondition.makeCondition("statusId", EntityOperator.EQUALS, "ORDER_CREATED")); pendExprs.add(EntityCondition.makeCondition("statusId", EntityOperator.EQUALS, "ORDER_PROCESSING")); pendExprs.add(EntityCondition.makeCondition("statusId", EntityOperator.EQUALS, "ORDER_APPROVED")); 
orExprs.add(EntityCondition.makeCondition(pendExprs, EntityOperator.OR)); } else { orExprs.add(EntityCondition.makeCondition("statusId", EntityOperator.EQUALS, orderStatusId)); } } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } List<String> productStoreList = UtilGenerics.checkList(context.get("productStoreId")); if (productStoreList != null) { List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); for (String productStoreId : productStoreList) { paramList.add("productStoreId=" + productStoreId); orExprs.add(EntityCondition.makeCondition("productStoreId", EntityOperator.EQUALS, productStoreId)); } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } List<String> webSiteList = UtilGenerics.checkList(context.get("orderWebSiteId")); if (webSiteList != null) { List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); for (String webSiteId : webSiteList) { paramList.add("webSiteId=" + webSiteId); orExprs.add(EntityCondition.makeCondition("webSiteId", EntityOperator.EQUALS, webSiteId)); } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } List<String> saleChannelList = UtilGenerics.checkList(context.get("salesChannelEnumId")); if (saleChannelList != null) { List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); for (String salesChannelEnumId : saleChannelList) { paramList.add("salesChannelEnumId=" + salesChannelEnumId); orExprs.add(EntityCondition.makeCondition("salesChannelEnumId", EntityOperator.EQUALS, salesChannelEnumId)); } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } String createdBy = (String) context.get("createdBy"); if (UtilValidate.isNotEmpty(createdBy)) { paramList.add("createdBy=" + createdBy); conditions.add(makeExpr("createdBy", createdBy)); } String terminalId = (String) context.get("terminalId"); if (UtilValidate.isNotEmpty(terminalId)) { paramList.add("terminalId=" + terminalId); conditions.add(makeExpr("terminalId", terminalId)); } String 
transactionId = (String) context.get("transactionId"); if (UtilValidate.isNotEmpty(transactionId)) { paramList.add("transactionId=" + transactionId); conditions.add(makeExpr("transactionId", transactionId)); } String externalId = (String) context.get("externalId"); if (UtilValidate.isNotEmpty(externalId)) { paramList.add("externalId=" + externalId); conditions.add(makeExpr("externalId", externalId)); } String internalCode = (String) context.get("internalCode"); if (UtilValidate.isNotEmpty(internalCode)) { paramList.add("internalCode=" + internalCode); conditions.add(makeExpr("internalCode", internalCode)); } String dateField = "Y".equals(useEntryDate) ? "entryDate" : "orderDate"; String minDate = (String) context.get("minDate"); if (UtilValidate.isNotEmpty(minDate) && minDate.length() > 8) { minDate = minDate.trim(); if (minDate.length() < 14) minDate = minDate + " " + "00:00:00.000"; paramList.add("minDate=" + minDate); try { Object converted = ObjectType.simpleTypeConvert(minDate, "Timestamp", null, null); if (converted != null) { conditions.add(EntityCondition.makeCondition(dateField, EntityOperator.GREATER_THAN_EQUAL_TO, converted)); } } catch (GeneralException e) { Debug.logWarning(e.getMessage(), module); } } String maxDate = (String) context.get("maxDate"); if (UtilValidate.isNotEmpty(maxDate) && maxDate.length() > 8) { maxDate = maxDate.trim(); if (maxDate.length() < 14) maxDate = maxDate + " " + "23:59:59.999"; paramList.add("maxDate=" + maxDate); try { Object converted = ObjectType.simpleTypeConvert(maxDate, "Timestamp", null, null); if (converted != null) { conditions.add(EntityCondition.makeCondition("orderDate", EntityOperator.LESS_THAN_EQUAL_TO, converted)); } } catch (GeneralException e) { Debug.logWarning(e.getMessage(), module); } } // party (role) fields String userLoginId = (String) context.get("userLoginId"); String partyId = (String) context.get("partyId"); List<String> roleTypeList = UtilGenerics.checkList(context.get("roleTypeId")); if 
(UtilValidate.isNotEmpty(userLoginId) && UtilValidate.isEmpty(partyId)) { GenericValue ul = null; try { ul = EntityQuery.use(delegator).from("UserLogin").where("userLoginId", userLoginId).cache().queryOne(); } catch (GenericEntityException e) { Debug.logWarning(e.getMessage(), module); } if (ul != null) { partyId = ul.getString("partyId"); } } String isViewed = (String) context.get("isViewed"); if (UtilValidate.isNotEmpty(isViewed)) { paramList.add("isViewed=" + isViewed); conditions.add(makeExpr("isViewed", isViewed)); } // Shipment Method String shipmentMethod = (String) context.get("shipmentMethod"); if (UtilValidate.isNotEmpty(shipmentMethod)) { String carrierPartyId = shipmentMethod.substring(0, shipmentMethod.indexOf("@")); String ShippingMethodTypeId = shipmentMethod.substring(shipmentMethod.indexOf("@")+1); dve.addMemberEntity("OISG", "OrderItemShipGroup"); dve.addAlias("OISG", "shipmentMethodTypeId"); dve.addAlias("OISG", "carrierPartyId"); dve.addViewLink("OH", "OISG", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); if (UtilValidate.isNotEmpty(carrierPartyId)) { paramList.add("carrierPartyId=" + carrierPartyId); conditions.add(makeExpr("carrierPartyId", carrierPartyId)); } if (UtilValidate.isNotEmpty(ShippingMethodTypeId)) { paramList.add("ShippingMethodTypeId=" + ShippingMethodTypeId); conditions.add(makeExpr("shipmentMethodTypeId", ShippingMethodTypeId)); } } // PaymentGatewayResponse String gatewayAvsResult = (String) context.get("gatewayAvsResult"); String gatewayScoreResult = (String) context.get("gatewayScoreResult"); if (UtilValidate.isNotEmpty(gatewayAvsResult) || UtilValidate.isNotEmpty(gatewayScoreResult)) { dve.addMemberEntity("OPP", "OrderPaymentPreference"); dve.addMemberEntity("PGR", "PaymentGatewayResponse"); dve.addAlias("OPP", "orderPaymentPreferenceId"); dve.addAlias("PGR", "gatewayAvsResult"); dve.addAlias("PGR", "gatewayScoreResult"); dve.addViewLink("OH", "OPP", Boolean.FALSE, UtilMisc.toList(new 
ModelKeyMap("orderId", "orderId"))); dve.addViewLink("OPP", "PGR", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderPaymentPreferenceId", "orderPaymentPreferenceId"))); } if (UtilValidate.isNotEmpty(gatewayAvsResult)) { paramList.add("gatewayAvsResult=" + gatewayAvsResult); conditions.add(EntityCondition.makeCondition("gatewayAvsResult", gatewayAvsResult)); } if (UtilValidate.isNotEmpty(gatewayScoreResult)) { paramList.add("gatewayScoreResult=" + gatewayScoreResult); conditions.add(EntityCondition.makeCondition("gatewayScoreResult", gatewayScoreResult)); } // add the role data to the view if (roleTypeList != null || partyId != null) { dve.addMemberEntity("OT", "OrderRole"); dve.addAlias("OT", "partyId"); dve.addAlias("OT", "roleTypeId"); dve.addViewLink("OH", "OT", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); } if (UtilValidate.isNotEmpty(partyId)) { paramList.add("partyId=" + partyId); fieldsToSelect.add("partyId"); conditions.add(makeExpr("partyId", partyId)); } if (roleTypeList != null) { fieldsToSelect.add("roleTypeId"); List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); for (String roleTypeId : roleTypeList) { paramList.add("roleTypeId=" + roleTypeId); orExprs.add(makeExpr("roleTypeId", roleTypeId)); } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } // order item fields String correspondingPoId = (String) context.get("correspondingPoId"); String subscriptionId = (String) context.get("subscriptionId"); String productId = (String) context.get("productId"); String budgetId = (String) context.get("budgetId"); String quoteId = (String) context.get("quoteId"); String goodIdentificationTypeId = (String) context.get("goodIdentificationTypeId"); String goodIdentificationIdValue = (String) context.get("goodIdentificationIdValue"); boolean hasGoodIdentification = UtilValidate.isNotEmpty(goodIdentificationTypeId) && UtilValidate.isNotEmpty(goodIdentificationIdValue); if (correspondingPoId != null || 
subscriptionId != null || productId != null || budgetId != null || quoteId != null || hasGoodIdentification) { dve.addMemberEntity("OI", "OrderItem"); dve.addAlias("OI", "correspondingPoId"); dve.addAlias("OI", "subscriptionId"); dve.addAlias("OI", "productId"); dve.addAlias("OI", "budgetId"); dve.addAlias("OI", "quoteId"); dve.addViewLink("OH", "OI", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); if (hasGoodIdentification) { dve.addMemberEntity("GOODID", "GoodIdentification"); dve.addAlias("GOODID", "goodIdentificationTypeId"); dve.addAlias("GOODID", "idValue"); dve.addViewLink("OI", "GOODID", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("productId", "productId"))); paramList.add("goodIdentificationTypeId=" + goodIdentificationTypeId); conditions.add(makeExpr("goodIdentificationTypeId", goodIdentificationTypeId)); paramList.add("goodIdentificationIdValue=" + goodIdentificationIdValue); conditions.add(makeExpr("idValue", goodIdentificationIdValue)); } } if (UtilValidate.isNotEmpty(correspondingPoId)) { paramList.add("correspondingPoId=" + correspondingPoId); conditions.add(makeExpr("correspondingPoId", correspondingPoId)); } if (UtilValidate.isNotEmpty(subscriptionId)) { paramList.add("subscriptionId=" + subscriptionId); conditions.add(makeExpr("subscriptionId", subscriptionId)); } if (UtilValidate.isNotEmpty(productId)) { paramList.add("productId=" + productId); if (productId.startsWith("%") || productId.startsWith("*") || productId.endsWith("%") || productId.endsWith("*")) { conditions.add(makeExpr("productId", productId)); } else { GenericValue product = null; try { product = EntityQuery.use(delegator).from("Product").where("productId", productId).queryOne(); } catch (GenericEntityException e) { Debug.logWarning(e.getMessage(), module); } if (product != null) { String isVirtual = product.getString("isVirtual"); if (isVirtual != null && "Y".equals(isVirtual)) { List<EntityExpr> orExprs = new LinkedList<EntityExpr>(); 
orExprs.add(EntityCondition.makeCondition("productId", EntityOperator.EQUALS, productId)); Map<String, Object> varLookup = null; try { varLookup = dispatcher.runSync("getAllProductVariants", UtilMisc.toMap("productId", productId)); } catch (GenericServiceException e) { Debug.logWarning(e.getMessage(), module); } List<GenericValue> variants = UtilGenerics.checkList(varLookup.get("assocProducts")); if (variants != null) { for (GenericValue v : variants) { orExprs.add(EntityCondition.makeCondition("productId", EntityOperator.EQUALS, v.getString("productIdTo"))); } } conditions.add(EntityCondition.makeCondition(orExprs, EntityOperator.OR)); } else { conditions.add(EntityCondition.makeCondition("productId", EntityOperator.EQUALS, productId)); } } else { String failMsg = UtilProperties.getMessage("OrderErrorUiLabels", "OrderFindOrderProductInvalid", UtilMisc.toMap("productId", productId), locale); return ServiceUtil.returnFailure(failMsg); } } } if (UtilValidate.isNotEmpty(budgetId)) { paramList.add("budgetId=" + budgetId); conditions.add(makeExpr("budgetId", budgetId)); } if (UtilValidate.isNotEmpty(quoteId)) { paramList.add("quoteId=" + quoteId); conditions.add(makeExpr("quoteId", quoteId)); } // payment preference fields String billingAccountId = (String) context.get("billingAccountId"); String finAccountId = (String) context.get("finAccountId"); String cardNumber = (String) context.get("cardNumber"); String accountNumber = (String) context.get("accountNumber"); String paymentStatusId = (String) context.get("paymentStatusId"); if (UtilValidate.isNotEmpty(paymentStatusId)) { paramList.add("paymentStatusId=" + paymentStatusId); conditions.add(makeExpr("paymentStatusId", paymentStatusId)); } if (finAccountId != null || cardNumber != null || accountNumber != null || paymentStatusId != null) { dve.addMemberEntity("OP", "OrderPaymentPreference"); dve.addAlias("OP", "finAccountId"); dve.addAlias("OP", "paymentMethodId"); dve.addAlias("OP", "paymentStatusId", "statusId", 
null, false, false, null); dve.addViewLink("OH", "OP", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); } // search by billing account ID if (UtilValidate.isNotEmpty(billingAccountId)) { paramList.add("billingAccountId=" + billingAccountId); conditions.add(makeExpr("billingAccountId", billingAccountId)); } // search by fin account ID if (UtilValidate.isNotEmpty(finAccountId)) { paramList.add("finAccountId=" + finAccountId); conditions.add(makeExpr("finAccountId", finAccountId)); } // search by card number if (UtilValidate.isNotEmpty(cardNumber)) { dve.addMemberEntity("CC", "CreditCard"); dve.addAlias("CC", "cardNumber"); dve.addViewLink("OP", "CC", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("paymentMethodId", "paymentMethodId"))); paramList.add("cardNumber=" + cardNumber); conditions.add(makeExpr("cardNumber", cardNumber)); } // search by eft account number if (UtilValidate.isNotEmpty(accountNumber)) { dve.addMemberEntity("EF", "EftAccount"); dve.addAlias("EF", "accountNumber"); dve.addViewLink("OP", "EF", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("paymentMethodId", "paymentMethodId"))); paramList.add("accountNumber=" + accountNumber); conditions.add(makeExpr("accountNumber", accountNumber)); } // shipment/inventory item String inventoryItemId = (String) context.get("inventoryItemId"); String softIdentifier = (String) context.get("softIdentifier"); String serialNumber = (String) context.get("serialNumber"); String shipmentId = (String) context.get("shipmentId"); if (shipmentId != null || inventoryItemId != null || softIdentifier != null || serialNumber != null) { dve.addMemberEntity("II", "ItemIssuance"); dve.addAlias("II", "shipmentId"); dve.addAlias("II", "inventoryItemId"); dve.addViewLink("OH", "II", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); if (softIdentifier != null || serialNumber != null) { dve.addMemberEntity("IV", "InventoryItem"); dve.addAlias("IV", "softIdentifier"); dve.addAlias("IV", 
"serialNumber"); dve.addViewLink("II", "IV", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("inventoryItemId", "inventoryItemId"))); } } if (UtilValidate.isNotEmpty(inventoryItemId)) { paramList.add("inventoryItemId=" + inventoryItemId); conditions.add(makeExpr("inventoryItemId", inventoryItemId)); } if (UtilValidate.isNotEmpty(softIdentifier)) { paramList.add("softIdentifier=" + softIdentifier); conditions.add(makeExpr("softIdentifier", softIdentifier, true)); } if (UtilValidate.isNotEmpty(serialNumber)) { paramList.add("serialNumber=" + serialNumber); conditions.add(makeExpr("serialNumber", serialNumber, true)); } if (UtilValidate.isNotEmpty(shipmentId)) { paramList.add("shipmentId=" + shipmentId); conditions.add(makeExpr("shipmentId", shipmentId)); } // back order checking String hasBackOrders = (String) context.get("hasBackOrders"); if (UtilValidate.isNotEmpty(hasBackOrders)) { dve.addMemberEntity("IR", "OrderItemShipGrpInvRes"); dve.addAlias("IR", "quantityNotAvailable"); dve.addViewLink("OH", "IR", Boolean.FALSE, UtilMisc.toList(new ModelKeyMap("orderId", "orderId"))); paramList.add("hasBackOrders=" + hasBackOrders); if ("Y".equals(hasBackOrders)) { conditions.add(EntityCondition.makeCondition("quantityNotAvailable", EntityOperator.NOT_EQUAL, null)); conditions.add(EntityCondition.makeCondition("quantityNotAvailable", EntityOperator.GREATER_THAN, BigDecimal.ZERO)); } else if ("N".equals(hasBackOrders)) { List<EntityExpr> orExpr = new LinkedList<EntityExpr>(); orExpr.add(EntityCondition.makeCondition("quantityNotAvailable", EntityOperator.EQUALS, null)); orExpr.add(EntityCondition.makeCondition("quantityNotAvailable", EntityOperator.EQUALS, BigDecimal.ZERO)); conditions.add(EntityCondition.makeCondition(orExpr, EntityOperator.OR)); } } // Get all orders according to specific ship to country with "Only Include" or "Do not Include". 
String countryGeoId = (String) context.get("countryGeoId"); String includeCountry = (String) context.get("includeCountry"); if (UtilValidate.isNotEmpty(countryGeoId) && UtilValidate.isNotEmpty(includeCountry)) { paramList.add("countryGeoId=" + countryGeoId); paramList.add("includeCountry=" + includeCountry); // add condition to dynamic view dve.addMemberEntity("OCM", "OrderContactMech"); dve.addMemberEntity("PA", "PostalAddress"); dve.addAlias("OCM", "contactMechId"); dve.addAlias("OCM", "contactMechPurposeTypeId"); dve.addAlias("PA", "countryGeoId"); dve.addViewLink("OH", "OCM", Boolean.FALSE, ModelKeyMap.makeKeyMapList("orderId")); dve.addViewLink("OCM", "PA", Boolean.FALSE, ModelKeyMap.makeKeyMapList("contactMechId")); EntityConditionList<EntityExpr> exprs = null; if ("Y".equals(includeCountry)) { exprs = EntityCondition.makeCondition(UtilMisc.toList( EntityCondition.makeCondition("contactMechPurposeTypeId", "SHIPPING_LOCATION"), EntityCondition.makeCondition("countryGeoId", countryGeoId)), EntityOperator.AND); } else { exprs = EntityCondition.makeCondition(UtilMisc.toList( EntityCondition.makeCondition("contactMechPurposeTypeId", "SHIPPING_LOCATION"), EntityCondition.makeCondition("countryGeoId", EntityOperator.NOT_EQUAL, countryGeoId)), EntityOperator.AND); } conditions.add(exprs); } // create the main condition EntityCondition cond = null; if (conditions.size() > 0 || showAll.equalsIgnoreCase("Y")) { cond = EntityCondition.makeCondition(conditions, EntityOperator.AND); } if (Debug.verboseOn()) { Debug.logInfo("Find order query: " + cond.toString(), module); } List<GenericValue> orderList = new LinkedList<GenericValue>(); int orderCount = 0; // get the index for the partial list int lowIndex = 0; int highIndex = 0; if (cond != null) { PagedList<GenericValue> pagedOrderList = null; try { // do the lookup pagedOrderList = EntityQuery.use(delegator) .select(fieldsToSelect) .from(dve) .where(cond) .orderBy(orderBy) .distinct() // set distinct on so we only get one 
row per order .cursorScrollInsensitive() .queryPagedList(viewIndex - 1, viewSize); orderCount = pagedOrderList.getSize(); lowIndex = pagedOrderList.getStartIndex(); highIndex = pagedOrderList.getEndIndex(); orderList = pagedOrderList.getData(); } catch (GenericEntityException e) { Debug.logError(e, module); return ServiceUtil.returnError(e.getMessage()); } } // create the result map Map<String, Object> result = ServiceUtil.returnSuccess(); // filter out requested inventory problems filterInventoryProblems(context, result, orderList, paramList); // format the param list String paramString = StringUtil.join(paramList, "&amp;"); result.put("highIndex", Integer.valueOf(highIndex)); result.put("lowIndex", Integer.valueOf(lowIndex)); result.put("viewIndex", viewIndex); result.put("viewSize", viewSize); result.put("showAll", showAll); result.put("paramList", (paramString != null? paramString: "")); result.put("orderList", orderList); result.put("orderListSize", Integer.valueOf(orderCount)); return result; } public static void filterInventoryProblems(Map<String, ? 
extends Object> context, Map<String, Object> result, List<GenericValue> orderList, List<String> paramList) { List<String> filterInventoryProblems = new LinkedList<String>(); String doFilter = (String) context.get("filterInventoryProblems"); if (doFilter == null) { doFilter = "N"; } if ("Y".equals(doFilter) && orderList.size() > 0) { paramList.add("filterInventoryProblems=Y"); for (GenericValue orderHeader : orderList) { OrderReadHelper orh = new OrderReadHelper(orderHeader); BigDecimal backorderQty = orh.getOrderBackorderQuantity(); if (backorderQty.compareTo(BigDecimal.ZERO) == 1) { filterInventoryProblems.add(orh.getOrderId()); } } } List<String> filterPOsOpenPastTheirETA = new LinkedList<String>(); List<String> filterPOsWithRejectedItems = new LinkedList<String>(); List<String> filterPartiallyReceivedPOs = new LinkedList<String>(); String filterPOReject = (String) context.get("filterPOsWithRejectedItems"); String filterPOPast = (String) context.get("filterPOsOpenPastTheirETA"); String filterPartRec = (String) context.get("filterPartiallyReceivedPOs"); if (filterPOReject == null) { filterPOReject = "N"; } if (filterPOPast == null) { filterPOPast = "N"; } if (filterPartRec == null) { filterPartRec = "N"; } boolean doPoFilter = false; if ("Y".equals(filterPOReject)) { paramList.add("filterPOsWithRejectedItems=Y"); doPoFilter = true; } if ("Y".equals(filterPOPast)) { paramList.add("filterPOsOpenPastTheirETA=Y"); doPoFilter = true; } if ("Y".equals(filterPartRec)) { paramList.add("filterPartiallyReceivedPOs=Y"); doPoFilter = true; } if (doPoFilter && orderList.size() > 0) { for (GenericValue orderHeader : orderList) { OrderReadHelper orh = new OrderReadHelper(orderHeader); String orderType = orh.getOrderTypeId(); String orderId = orh.getOrderId(); if ("PURCHASE_ORDER".equals(orderType)) { if ("Y".equals(filterPOReject) && orh.getRejectedOrderItems()) { filterPOsWithRejectedItems.add(orderId); } else if ("Y".equals(filterPOPast) && orh.getPastEtaOrderItems(orderId)) { 
filterPOsOpenPastTheirETA.add(orderId); } else if ("Y".equals(filterPartRec) && orh.getPartiallyReceivedItems()) { filterPartiallyReceivedPOs.add(orderId); } } } } result.put("filterInventoryProblemsList", filterInventoryProblems); result.put("filterPOsWithRejectedItemsList", filterPOsWithRejectedItems); result.put("filterPOsOpenPastTheirETAList", filterPOsOpenPastTheirETA); result.put("filterPartiallyReceivedPOsList", filterPartiallyReceivedPOs); } protected static EntityExpr makeExpr(String fieldName, String value) { return makeExpr(fieldName, value, false); } protected static EntityExpr makeExpr(String fieldName, String value, boolean forceLike) { EntityComparisonOperator<?, ?> op = forceLike ? EntityOperator.LIKE : EntityOperator.EQUALS; if (value.startsWith("*")) { op = EntityOperator.LIKE; value = "%" + value.substring(1); } else if (value.startsWith("%")) { op = EntityOperator.LIKE; } if (value.endsWith("*")) { op = EntityOperator.LIKE; value = value.substring(0, value.length() - 1) + "%"; } else if (value.endsWith("%")) { op = EntityOperator.LIKE; } if (forceLike) { if (!value.startsWith("%")) { value = "%" + value; } if (!value.endsWith("%")) { value = value + "%"; } } return EntityCondition.makeCondition(fieldName, op, value); } }
googleapis/google-cloud-java
35,410
java-developerconnect/proto-google-cloud-developerconnect-v1/src/main/java/com/google/cloud/developerconnect/v1/FetchReadWriteTokenResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/developerconnect/v1/developer_connect.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.developerconnect.v1; /** * * * <pre> * Message for responding to get read/write token. * </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.FetchReadWriteTokenResponse} */ public final class FetchReadWriteTokenResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) FetchReadWriteTokenResponseOrBuilder { private static final long serialVersionUID = 0L; // Use FetchReadWriteTokenResponse.newBuilder() to construct. 
private FetchReadWriteTokenResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FetchReadWriteTokenResponse() { token_ = ""; gitUsername_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FetchReadWriteTokenResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadWriteTokenResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadWriteTokenResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.class, com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.Builder.class); } private int bitField0_; public static final int TOKEN_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object token_ = ""; /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The token. */ @java.lang.Override public java.lang.String getToken() { java.lang.Object ref = token_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); token_ = s; return s; } } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The bytes for token. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTokenBytes() { java.lang.Object ref = token_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); token_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EXPIRATION_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp expirationTime_; /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return Whether the expirationTime field is set. */ @java.lang.Override public boolean hasExpirationTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return The expirationTime. */ @java.lang.Override public com.google.protobuf.Timestamp getExpirationTime() { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } public static final int GIT_USERNAME_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object gitUsername_ = ""; /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The gitUsername. 
*/ @java.lang.Override public java.lang.String getGitUsername() { java.lang.Object ref = gitUsername_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gitUsername_ = s; return s; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The bytes for gitUsername. */ @java.lang.Override public com.google.protobuf.ByteString getGitUsernameBytes() { java.lang.Object ref = gitUsername_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gitUsername_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, token_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getExpirationTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gitUsername_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, gitUsername_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, token_); } if 
(((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExpirationTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gitUsername_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, gitUsername_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse)) { return super.equals(obj); } com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse other = (com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) obj; if (!getToken().equals(other.getToken())) return false; if (hasExpirationTime() != other.hasExpirationTime()) return false; if (hasExpirationTime()) { if (!getExpirationTime().equals(other.getExpirationTime())) return false; } if (!getGitUsername().equals(other.getGitUsername())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TOKEN_FIELD_NUMBER; hash = (53 * hash) + getToken().hashCode(); if (hasExpirationTime()) { hash = (37 * hash) + EXPIRATION_TIME_FIELD_NUMBER; hash = (53 * hash) + getExpirationTime().hashCode(); } hash = (37 * hash) + GIT_USERNAME_FIELD_NUMBER; hash = (53 * hash) + getGitUsername().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( 
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for responding to get read/write token. 
* </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.FetchReadWriteTokenResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadWriteTokenResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_FetchReadWriteTokenResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.class, com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.Builder.class); } // Construct using com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getExpirationTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; token_ = ""; expirationTime_ = null; if (expirationTimeBuilder_ != null) { expirationTimeBuilder_.dispose(); expirationTimeBuilder_ = null; } gitUsername_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto 
.internal_static_google_cloud_developerconnect_v1_FetchReadWriteTokenResponse_descriptor; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse getDefaultInstanceForType() { return com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse build() { com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse buildPartial() { com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse result = new com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.token_ = token_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.expirationTime_ = expirationTimeBuilder_ == null ? 
expirationTime_ : expirationTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.gitUsername_ = gitUsername_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) { return mergeFrom((com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse other) { if (other == com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse.getDefaultInstance()) return this; if (!other.getToken().isEmpty()) { token_ = other.token_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasExpirationTime()) { mergeExpirationTime(other.getExpirationTime()); } if (!other.getGitUsername().isEmpty()) { gitUsername_ = other.gitUsername_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } 
@java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { token_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getExpirationTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { gitUsername_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object token_ = ""; /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The token. */ public java.lang.String getToken() { java.lang.Object ref = token_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); token_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return The bytes for token. 
*/ public com.google.protobuf.ByteString getTokenBytes() { java.lang.Object ref = token_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); token_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @param value The token to set. * @return This builder for chaining. */ public Builder setToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } token_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @return This builder for chaining. */ public Builder clearToken() { token_ = getDefaultInstance().getToken(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The token content. * </pre> * * <code>string token = 1;</code> * * @param value The bytes for token to set. * @return This builder for chaining. */ public Builder setTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); token_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.Timestamp expirationTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> expirationTimeBuilder_; /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return Whether the expirationTime field is set. */ public boolean hasExpirationTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. 
* </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> * * @return The expirationTime. */ public com.google.protobuf.Timestamp getExpirationTime() { if (expirationTimeBuilder_ == null) { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } else { return expirationTimeBuilder_.getMessage(); } } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder setExpirationTime(com.google.protobuf.Timestamp value) { if (expirationTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } expirationTime_ = value; } else { expirationTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder setExpirationTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (expirationTimeBuilder_ == null) { expirationTime_ = builderForValue.build(); } else { expirationTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder mergeExpirationTime(com.google.protobuf.Timestamp value) { if (expirationTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && expirationTime_ != null && expirationTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getExpirationTimeBuilder().mergeFrom(value); } else { expirationTime_ = value; } } else { expirationTimeBuilder_.mergeFrom(value); } if (expirationTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. 
* </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public Builder clearExpirationTime() { bitField0_ = (bitField0_ & ~0x00000002); expirationTime_ = null; if (expirationTimeBuilder_ != null) { expirationTimeBuilder_.dispose(); expirationTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getExpirationTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExpirationTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { if (expirationTimeBuilder_ != null) { return expirationTimeBuilder_.getMessageOrBuilder(); } else { return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; } } /** * * * <pre> * Expiration timestamp. Can be empty if unknown or non-expiring. * </pre> * * <code>.google.protobuf.Timestamp expiration_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getExpirationTimeFieldBuilder() { if (expirationTimeBuilder_ == null) { expirationTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getExpirationTime(), getParentForChildren(), isClean()); expirationTime_ = null; } return expirationTimeBuilder_; } private java.lang.Object gitUsername_ = ""; /** * * * <pre> * The git_username to specify when making a git clone with the * token. 
For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The gitUsername. */ public java.lang.String getGitUsername() { java.lang.Object ref = gitUsername_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gitUsername_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return The bytes for gitUsername. */ public com.google.protobuf.ByteString getGitUsernameBytes() { java.lang.Object ref = gitUsername_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gitUsername_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @param value The gitUsername to set. * @return This builder for chaining. */ public Builder setGitUsername(java.lang.String value) { if (value == null) { throw new NullPointerException(); } gitUsername_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearGitUsername() { gitUsername_ = getDefaultInstance().getGitUsername(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The git_username to specify when making a git clone with the * token. For example, for GitHub GitRepositoryLinks, this would be * "x-access-token" * </pre> * * <code>string git_username = 3;</code> * * @param value The bytes for gitUsername to set. * @return This builder for chaining. */ public Builder setGitUsernameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); gitUsername_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) } // @@protoc_insertion_point(class_scope:google.cloud.developerconnect.v1.FetchReadWriteTokenResponse) private static final com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse(); } public static com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FetchReadWriteTokenResponse> PARSER = new com.google.protobuf.AbstractParser<FetchReadWriteTokenResponse>() { @java.lang.Override public FetchReadWriteTokenResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FetchReadWriteTokenResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FetchReadWriteTokenResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.developerconnect.v1.FetchReadWriteTokenResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,489
java-batch/proto-google-cloud-batch-v1alpha/src/main/java/com/google/cloud/batch/v1alpha/DeleteJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/batch/v1alpha/batch.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.batch.v1alpha; /** * * * <pre> * DeleteJob Request. * </pre> * * Protobuf type {@code google.cloud.batch.v1alpha.DeleteJobRequest} */ public final class DeleteJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.batch.v1alpha.DeleteJobRequest) DeleteJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteJobRequest.newBuilder() to construct. 
private DeleteJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteJobRequest() { name_ = ""; reason_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.batch.v1alpha.BatchProto .internal_static_google_cloud_batch_v1alpha_DeleteJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.batch.v1alpha.BatchProto .internal_static_google_cloud_batch_v1alpha_DeleteJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.batch.v1alpha.DeleteJobRequest.class, com.google.cloud.batch.v1alpha.DeleteJobRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REASON_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object reason_ = ""; /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The reason. */ @java.lang.Override public java.lang.String getReason() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); reason_ = s; return s; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for reason. */ @java.lang.Override public com.google.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); reason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reason_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, reason_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reason_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, reason_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.batch.v1alpha.DeleteJobRequest)) { return super.equals(obj); } com.google.cloud.batch.v1alpha.DeleteJobRequest other = (com.google.cloud.batch.v1alpha.DeleteJobRequest) obj; if (!getName().equals(other.getName())) return false; if (!getReason().equals(other.getReason())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REASON_FIELD_NUMBER; hash = (53 * hash) + getReason().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.batch.v1alpha.DeleteJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * DeleteJob Request. * </pre> * * Protobuf type {@code google.cloud.batch.v1alpha.DeleteJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.batch.v1alpha.DeleteJobRequest) com.google.cloud.batch.v1alpha.DeleteJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.batch.v1alpha.BatchProto .internal_static_google_cloud_batch_v1alpha_DeleteJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.batch.v1alpha.BatchProto .internal_static_google_cloud_batch_v1alpha_DeleteJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.batch.v1alpha.DeleteJobRequest.class, com.google.cloud.batch.v1alpha.DeleteJobRequest.Builder.class); } // Construct using com.google.cloud.batch.v1alpha.DeleteJobRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; reason_ = ""; requestId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.batch.v1alpha.BatchProto .internal_static_google_cloud_batch_v1alpha_DeleteJobRequest_descriptor; } @java.lang.Override public com.google.cloud.batch.v1alpha.DeleteJobRequest getDefaultInstanceForType() { return com.google.cloud.batch.v1alpha.DeleteJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.batch.v1alpha.DeleteJobRequest build() { com.google.cloud.batch.v1alpha.DeleteJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.batch.v1alpha.DeleteJobRequest buildPartial() { com.google.cloud.batch.v1alpha.DeleteJobRequest result = new com.google.cloud.batch.v1alpha.DeleteJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.batch.v1alpha.DeleteJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.reason_ = reason_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.requestId_ = requestId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.batch.v1alpha.DeleteJobRequest) { return mergeFrom((com.google.cloud.batch.v1alpha.DeleteJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.batch.v1alpha.DeleteJobRequest other) { if (other == com.google.cloud.batch.v1alpha.DeleteJobRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getReason().isEmpty()) { reason_ = other.reason_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { reason_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { 
onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Job name. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object reason_ = ""; /** * * * <pre> * Optional. Reason for this deletion. 
* </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The reason. */ public java.lang.String getReason() { java.lang.Object ref = reason_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); reason_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for reason. */ public com.google.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); reason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The reason to set. * @return This builder for chaining. */ public Builder setReason(java.lang.String value) { if (value == null) { throw new NullPointerException(); } reason_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearReason() { reason_ = getDefaultInstance().getReason(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. Reason for this deletion. * </pre> * * <code>string reason = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for reason to set. * @return This builder for chaining. 
*/ public Builder setReasonBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); reason_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. 
*/ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. 
* * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.batch.v1alpha.DeleteJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.batch.v1alpha.DeleteJobRequest) private static final com.google.cloud.batch.v1alpha.DeleteJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.batch.v1alpha.DeleteJobRequest(); } public static com.google.cloud.batch.v1alpha.DeleteJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteJobRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteJobRequest>() { @java.lang.Override public DeleteJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeleteJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.batch.v1alpha.DeleteJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/java-genai
35,832
src/main/java/com/google/genai/AsyncModels.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Auto-generated code. Do not edit.

package com.google.genai;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.genai.Common.BuiltRequest;
import com.google.genai.errors.GenAiIOException;
import com.google.genai.types.ComputeTokensConfig;
import com.google.genai.types.ComputeTokensResponse;
import com.google.genai.types.Content;
import com.google.genai.types.CountTokensConfig;
import com.google.genai.types.CountTokensResponse;
import com.google.genai.types.DeleteModelConfig;
import com.google.genai.types.DeleteModelResponse;
import com.google.genai.types.EditImageConfig;
import com.google.genai.types.EditImageResponse;
import com.google.genai.types.EmbedContentConfig;
import com.google.genai.types.EmbedContentResponse;
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.GenerateImagesConfig;
import com.google.genai.types.GenerateImagesResponse;
import com.google.genai.types.GenerateVideosConfig;
import com.google.genai.types.GenerateVideosOperation;
import com.google.genai.types.GenerateVideosSource;
import com.google.genai.types.GetModelConfig;
import com.google.genai.types.Image;
import com.google.genai.types.ListModelsConfig;
import com.google.genai.types.ListModelsResponse;
import com.google.genai.types.Model;
import com.google.genai.types.Part;
import com.google.genai.types.RecontextImageConfig;
import com.google.genai.types.RecontextImageResponse;
import com.google.genai.types.RecontextImageSource;
import com.google.genai.types.ReferenceImage;
import com.google.genai.types.ReferenceImageAPI;
import com.google.genai.types.SegmentImageConfig;
import com.google.genai.types.SegmentImageResponse;
import com.google.genai.types.SegmentImageSource;
import com.google.genai.types.UpdateModelConfig;
import com.google.genai.types.UpscaleImageAPIConfig;
import com.google.genai.types.UpscaleImageConfig;
import com.google.genai.types.UpscaleImageResponse;
import com.google.genai.types.Video;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.logging.Logger;

/** Async module of {@link Models} */
public final class AsyncModels {
  // Synchronous twin: owns request building (buildRequestFor*) and response
  // parsing (processResponseFor*). The async methods below only add
  // CompletableFuture plumbing around it.
  Models models;
  ApiClient apiClient;

  public AsyncModels(ApiClient apiClient) {
    this.apiClient = apiClient;
    this.models = new Models(apiClient);
  }

  /**
   * Asynchronously sends a generateContent request. Builds the request via {@link Models},
   * issues it with {@code ApiClient.asyncRequest}, and parses the response on completion.
   * The ApiResponse is closed by the try-with-resources once parsed.
   */
  CompletableFuture<GenerateContentResponse> privateGenerateContent(
      String model, List<Content> contents, GenerateContentConfig config) {
    BuiltRequest builtRequest =
        models.buildRequestForPrivateGenerateContent(model, contents, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateGenerateContent(res, config);
              }
            });
  }

  /**
   * Asynchronously opens a streaming generateContent request.
   *
   * <p>NOTE(review): unlike the non-streaming methods, the response is deliberately NOT closed
   * here — presumably the returned {@link ResponseStream} takes ownership and closes it when the
   * stream is exhausted; confirm against ResponseStream's implementation.
   */
  CompletableFuture<ResponseStream<GenerateContentResponse>> privateGenerateContentStream(
      String model, List<Content> contents, GenerateContentConfig config) {
    BuiltRequest builtRequest =
        models.buildRequestForPrivateGenerateContentStream(model, contents, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              return models.processResponseForPrivateGenerateContentStream(response, config);
            });
  }

  /** Asynchronously sends an embedContent request; see {@link #privateGenerateContent} pattern. */
  CompletableFuture<EmbedContentResponse> privateEmbedContent(
      String model, List<Content> contents, EmbedContentConfig config) {
    BuiltRequest builtRequest = models.buildRequestForPrivateEmbedContent(model, contents, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateEmbedContent(res, config);
              }
            });
  }

  /** Asynchronously private method for generating images. */
  CompletableFuture<GenerateImagesResponse> privateGenerateImages(
      String model, String prompt, GenerateImagesConfig config) {
    BuiltRequest builtRequest = models.buildRequestForPrivateGenerateImages(model, prompt, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateGenerateImages(res, config);
              }
            });
  }

  /** Asynchronously private method for editing an image. */
  CompletableFuture<EditImageResponse> privateEditImage(
      String model, String prompt, List<ReferenceImageAPI> referenceImages, EditImageConfig config) {
    BuiltRequest builtRequest =
        models.buildRequestForPrivateEditImage(model, prompt, referenceImages, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateEditImage(res, config);
              }
            });
  }

  /** Asynchronously private method for upscaling an image. */
  CompletableFuture<UpscaleImageResponse> privateUpscaleImage(
      String model, Image image, String upscaleFactor, UpscaleImageAPIConfig config) {
    BuiltRequest builtRequest =
        models.buildRequestForPrivateUpscaleImage(model, image, upscaleFactor, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateUpscaleImage(res, config);
              }
            });
  }

  /**
   * Asynchronously recontextualizes an image.
   *
   * <p>There are two types of recontextualization currently supported: 1) Imagen Product Recontext
   * - Generate images of products in new scenes and contexts. 2) Virtual Try-On: Generate images of
   * persons modeling fashion products.
   *
   * @param model the name of the GenAI model to use for image recontext
   * @param source a {@link com.google.genai.types.RecontextImageSource} An object containing the
   *     source inputs (prompt, personImage, productImages) for image recontext. prompt is optional
   *     for product recontext and disallowed for virtual try-on. personImage is required for
   *     virtual try-on, disallowed for product recontext. productImages is required for both
   *     product recontext and virtual try-on. Only one product image is supported for virtual
   *     try-on, and up to 3 product images (different angles of the same product) are supported for
   *     product recontext.
   * @param config a {@link com.google.genai.types.RecontextImageConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.RecontextImageResponse} instance that contains the
   *     generated images.
   */
  public CompletableFuture<RecontextImageResponse> recontextImage(
      String model, RecontextImageSource source, RecontextImageConfig config) {
    BuiltRequest builtRequest = models.buildRequestForRecontextImage(model, source, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForRecontextImage(res, config);
              }
            });
  }

  /**
   * Asynchronously segments an image, creating a mask of a specified area.
   *
   * @param model the name of the GenAI model to use for image segmentation
   * @param source a {@link com.google.genai.types.SegmentImageSource} An object containing the
   *     source inputs (prompt, image, scribbleImage) for image segmentation. The prompt is
   *     required for prompt mode and semantic mode, disallowed for other modes. scribbleImage is
   *     required for the interactive mode, disallowed for other modes.
   * @param config a {@link com.google.genai.types.SegmentImageConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.SegmentImageResponse} instance that contains the
   *     generated mask.
   */
  public CompletableFuture<SegmentImageResponse> segmentImage(
      String model, SegmentImageSource source, SegmentImageConfig config) {
    BuiltRequest builtRequest = models.buildRequestForSegmentImage(model, source, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForSegmentImage(res, config);
              }
            });
  }

  /**
   * Asynchronously fetches information about a model by name.
   *
   * @example ```java Model model = client.models.get("gemini-2.0-flash"); ```
   */
  public CompletableFuture<Model> get(String model, GetModelConfig config) {
    BuiltRequest builtRequest = models.buildRequestForGet(model, config);
    return this.apiClient
        .asyncRequest("get", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForGet(res, config);
              }
            });
  }

  /** Asynchronously fetches one page of models; pagination is handled by {@link #list}. */
  CompletableFuture<ListModelsResponse> privateList(ListModelsConfig config) {
    BuiltRequest builtRequest = models.buildRequestForPrivateList(config);
    return this.apiClient
        .asyncRequest("get", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateList(res, config);
              }
            });
  }

  /**
   * Asynchronously updates a tuned model by its name.
   *
   * @param model The name of the tuned model to update
   * @param config A {@link com.google.genai.types.UpdateModelConfig} instance that specifies the
   *     optional configurations
   * @return A {@link com.google.genai.types.Model} instance
   * @example ```java Model model = client.models.update( "tunedModels/12345",
   *     UpdateModelConfig.builder() .displayName("New display name") .description("New
   *     description") .build()); ```
   */
  public CompletableFuture<Model> update(String model, UpdateModelConfig config) {
    BuiltRequest builtRequest = models.buildRequestForUpdate(model, config);
    return this.apiClient
        .asyncRequest("patch", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForUpdate(res, config);
              }
            });
  }

  /**
   * Asynchronously deletes a tuned model by name.
   *
   * <p>(Summary corrected from a copy-paste of {@link #get}'s javadoc; the method issues an HTTP
   * DELETE via buildRequestForDelete/processResponseForDelete.)
   *
   * @example ```java Model model = client.models.delete("tunedModels/12345"); ```
   */
  public CompletableFuture<DeleteModelResponse> delete(String model, DeleteModelConfig config) {
    BuiltRequest builtRequest = models.buildRequestForDelete(model, config);
    return this.apiClient
        .asyncRequest("delete", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForDelete(res, config);
              }
            });
  }

  /**
   * Asynchronously counts tokens given a GenAI model and a list of content.
   *
   * @param model the name of the GenAI model to use.
   * @param contents a {@link List<com.google.genai.types.Content>} to send to count tokens for.
   * @param config a {@link com.google.genai.types.CountTokensConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.CountTokensResponse} instance that contains tokens
   *     count.
   */
  public CompletableFuture<CountTokensResponse> countTokens(
      String model, List<Content> contents, CountTokensConfig config) {
    BuiltRequest builtRequest = models.buildRequestForCountTokens(model, contents, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForCountTokens(res, config);
              }
            });
  }

  /**
   * Asynchronously computes tokens given a GenAI model and a list of content.
   *
   * @param model the name of the GenAI model to use.
   * @param contents a {@link List<com.google.genai.types.Content>} to send to compute tokens for.
   * @param config a {@link com.google.genai.types.ComputeTokensConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.ComputeTokensResponse} instance that contains tokens
   *     results.
   */
  public CompletableFuture<ComputeTokensResponse> computeTokens(
      String model, List<Content> contents, ComputeTokensConfig config) {
    BuiltRequest builtRequest = models.buildRequestForComputeTokens(model, contents, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForComputeTokens(res, config);
              }
            });
  }

  /** Asynchronously private method for generating videos. */
  CompletableFuture<GenerateVideosOperation> privateGenerateVideos(
      String model,
      String prompt,
      Image image,
      Video video,
      GenerateVideosSource source,
      GenerateVideosConfig config) {
    BuiltRequest builtRequest =
        models.buildRequestForPrivateGenerateVideos(model, prompt, image, video, source, config);
    return this.apiClient
        .asyncRequest("post", builtRequest.path, builtRequest.body, builtRequest.httpOptions)
        .thenApplyAsync(
            response -> {
              try (ApiResponse res = response) {
                return models.processResponseForPrivateGenerateVideos(res, config);
              }
            });
  }

  // Declared mid-class in the generated source; used by the AFC loop and the
  // streaming entry points below.
  private static final Logger logger = Logger.getLogger(AsyncModels.class.getName());

  /**
   * Asynchronously counts tokens given a GenAI model and a text string.
   *
   * @param model the name of the GenAI model to use.
   * @param text the text string to send to count tokens for.
   * @param config a {@link com.google.genai.types.CountTokensConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.CountTokensResponse} instance that contains tokens
   *     count.
   */
  public CompletableFuture<CountTokensResponse> countTokens(
      String model, String text, CountTokensConfig config) {
    // Delegates to the blocking Models implementation on the default async executor.
    return CompletableFuture.supplyAsync(() -> models.countTokens(model, text, config));
  }

  /**
   * Asynchronously computes tokens given a GenAI model and a text string.
   *
   * @param model the name of the GenAI model to use.
   * @param text the text string to send to count tokens for.
   * @param config a {@link com.google.genai.types.ComputeTokensConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.ComputeTokensResponse} instance that contains tokens
   *     results.
   */
  public CompletableFuture<ComputeTokensResponse> computeTokens(
      String model, String text, ComputeTokensConfig config) {
    return CompletableFuture.supplyAsync(() -> models.computeTokens(model, text, config));
  }

  /** A private helper class to pass the result of the AFC loop up the async chain. */
  private static class AfcLoopResult {
    final GenerateContentResponse response;
    final List<Content> history;

    AfcLoopResult(GenerateContentResponse response, List<Content> history) {
      this.response = response;
      this.history = history;
    }
  }

  /**
   * Represents a single iteration of the asynchronous AFC (automatic function calling) loop. This
   * method calls itself recursively inside a .thenCompose() block to chain asynchronous calls
   * sequentially.
   *
   * <p>The loop terminates when: the remote-call budget is exhausted, the model response carries no
   * parts, or none of the parts resolve to a callable function in {@code functionMap}. Otherwise
   * the function results are appended to the history and the model is called again.
   *
   * <p>NOTE(review): the "remote call %d is done" message is logged before the call is actually
   * issued — the wording appears ahead of the fact; confirm intent before relying on the log order.
   */
  private CompletableFuture<AfcLoopResult> privateGenerateContentLoopAsync(
      String model,
      List<Content> contents,
      GenerateContentConfig transformedConfig,
      ImmutableMap<String, Method> functionMap,
      List<Content> automaticFunctionCallingHistory,
      int remainingRemoteCalls,
      int initialMaxCalls) {
    logger.info(
        String.format(
            "Automatic function calling remote call %d is done",
            (initialMaxCalls - remainingRemoteCalls + 1)));
    return privateGenerateContent(model, contents, transformedConfig)
        .thenCompose(
            response -> {
              if (remainingRemoteCalls - 1 <= 0) {
                logger.info("Reached max remote calls for automatic function calling.");
                return CompletableFuture.completedFuture(
                    new AfcLoopResult(response, automaticFunctionCallingHistory));
              }
              // Bail out when the response has no usable parts to inspect for function calls.
              if (!response.candidates().isPresent()
                  || response.candidates().get().isEmpty()
                  || !response.candidates().get().get(0).content().isPresent()
                  || !response.candidates().get().get(0).content().get().parts().isPresent()
                  || response.candidates().get().get(0).content().get().parts().get().isEmpty()) {
                return CompletableFuture.completedFuture(
                    new AfcLoopResult(response, automaticFunctionCallingHistory));
              }
              ImmutableList<Part> functionResponseParts =
                  AfcUtil.getFunctionResponseParts(response, functionMap);
              if (functionResponseParts.isEmpty()) {
                // No function call the SDK can execute locally; return the model response as-is.
                return CompletableFuture.completedFuture(
                    new AfcLoopResult(response, automaticFunctionCallingHistory));
              }
              Content functionCallContent = response.candidates().get().get(0).content().get();
              Content functionResponseContent =
                  Content.builder().role("user").parts(functionResponseParts).build();
              // newHistory serves as both the next request contents and the running AFC history.
              List<Content> newHistory = new ArrayList<>(automaticFunctionCallingHistory);
              newHistory.add(functionCallContent);
              newHistory.add(functionResponseContent);
              return privateGenerateContentLoopAsync(
                  model,
                  newHistory,
                  transformedConfig,
                  functionMap,
                  newHistory,
                  remainingRemoteCalls - 1,
                  initialMaxCalls);
            });
  }

  /**
   * Asynchronously generates content given a GenAI model and a list of content.
   *
   * @param model the name of the GenAI model to use for generation
   * @param contents a {@link List<com.google.genai.types.Content>} to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link com.google.genai.types.GenerateContentResponse} instance that contains
   *     response contents and other metadata
   */
  public CompletableFuture<GenerateContentResponse> generateContent(
      String model, List<Content> contents, GenerateContentConfig config) {
    GenerateContentConfig transformedConfig = AfcUtil.transformGenerateContentConfig(config);
    // Fast paths: AFC disabled or no callable functions — a single model call suffices.
    if (AfcUtil.shouldDisableAfc(transformedConfig)) {
      return privateGenerateContent(model, contents, transformedConfig);
    }
    ImmutableMap<String, Method> functionMap = AfcUtil.getFunctionMap(config);
    if (functionMap.isEmpty()) {
      return privateGenerateContent(model, contents, transformedConfig);
    }
    int maxRemoteCalls = AfcUtil.getMaxRemoteCallsAfc(transformedConfig);
    logger.info(
        String.format(
            "Automatic function calling is enabled with max remote calls: %d", maxRemoteCalls));
    List<Content> automaticFunctionCallingHistory = new ArrayList<>(contents);
    return privateGenerateContentLoopAsync(
            model,
            contents,
            transformedConfig,
            functionMap,
            automaticFunctionCallingHistory,
            maxRemoteCalls,
            maxRemoteCalls)
        .thenApply(
            loopResult -> {
              if (AfcUtil.shouldAppendAfcHistory(transformedConfig)) {
                // Rebuild the response with the AFC history spliced in via its JSON tree,
                // since GenerateContentResponse itself is immutable.
                ObjectNode responseNode =
                    JsonSerializable.objectMapper.valueToTree(loopResult.response);
                responseNode.set(
                    "automaticFunctionCallingHistory",
                    JsonSerializable.objectMapper.valueToTree(loopResult.history));
                return JsonSerializable.fromJsonNode(responseNode, GenerateContentResponse.class);
              }
              return loopResult.response;
            });
  }

  /**
   * Asynchronously generates content given a GenAI model and a content object.
   *
   * @param model the name of the GenAI model to use for generation
   * @param content a {@link com.google.genai.types.Content} to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link com.google.genai.types.GenerateContentResponse} instance that contains
   *     response contents and other metadata
   */
  public CompletableFuture<GenerateContentResponse> generateContent(
      String model, Content content, GenerateContentConfig config) {
    return generateContent(model, Transformers.tContents(content), config);
  }

  /**
   * Asynchronously generates content given a GenAI model and a text string.
   *
   * @param model the name of the GenAI model to use for generation
   * @param text the text string to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link com.google.genai.types.GenerateContentResponse} instance that contains
   *     response contents and other metadata
   */
  public CompletableFuture<GenerateContentResponse> generateContent(
      String model, String text, GenerateContentConfig config) {
    return generateContent(model, Transformers.tContents(text), config);
  }

  /**
   * Asynchronously generates content with streaming support given a GenAI model and a list of
   * content.
   *
   * @param model the name of the GenAI model to use for generation
   * @param contents a {@link List<com.google.genai.types.Content>} to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link ResponseStream} of {@link com.google.genai.types.GenerateContentResponse}
   *     chunks containing response contents and other metadata
   */
  public CompletableFuture<ResponseStream<GenerateContentResponse>> generateContentStream(
      String model, List<Content> contents, GenerateContentConfig config) {
    GenerateContentConfig transformedConfig = AfcUtil.transformGenerateContentConfig(config);
    // AFC is not executed in streaming mode; warn so the caller is not surprised
    // by raw function-call parts in the stream.
    if (AfcUtil.hasCallableTool(config) && !AfcUtil.shouldDisableAfc(transformedConfig)) {
      logger.warning(
          "In generateContentStream method, detected that automatic function calling is enabled in"
              + " the config.AutomaticFunctionCalling(), and callable tool is present in the"
              + " config.tools() list. Automatic function calling is not supported in streaming"
              + " methods at the moment, will just return the function call parts from model if"
              + " there is any.");
    }
    return privateGenerateContentStream(model, contents, transformedConfig);
  }

  /**
   * Asynchronously generates content with streaming support given a GenAI model and a content
   * object.
   *
   * @param model the name of the GenAI model to use for generation
   * @param content a {@link com.google.genai.types.Content} to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link ResponseStream} of {@link com.google.genai.types.GenerateContentResponse}
   *     chunks containing response contents and other metadata
   */
  public CompletableFuture<ResponseStream<GenerateContentResponse>> generateContentStream(
      String model, Content content, GenerateContentConfig config) {
    return generateContentStream(model, Transformers.tContents(content), config);
  }

  /**
   * Asynchronously generates content with streaming support given a GenAI model and a text string.
   *
   * @param model the name of the GenAI model to use for generation
   * @param text the text string to send to the generative model
   * @param config a {@link com.google.genai.types.GenerateContentConfig} instance that specifies
   *     the optional configurations
   * @return a {@link ResponseStream} of {@link com.google.genai.types.GenerateContentResponse}
   *     chunks containing response contents and other metadata
   */
  public CompletableFuture<ResponseStream<GenerateContentResponse>> generateContentStream(
      String model, String text, GenerateContentConfig config) {
    return generateContentStream(model, Transformers.tContents(text), config);
  }

  /**
   * Asynchronously generates images given a GenAI model and a prompt.
   *
   * @param model the name of the GenAI model to use for generating images
   * @param prompt the prompt to generate images
   * @param config a {@link com.google.genai.types.GenerateImagesConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.GenerateImagesResponse} instance that contains the
   *     generated images.
   */
  public CompletableFuture<GenerateImagesResponse> generateImages(
      String model, String prompt, GenerateImagesConfig config) {
    return CompletableFuture.supplyAsync(() -> models.generateImages(model, prompt, config));
  }

  /**
   * Asynchronously edits an image given a GenAI model, a prompt, and a list of reference images.
   *
   * @param model the name of the GenAI model to use for editing capabilities
   * @param prompt the prompt to edit the image
   * @param referenceImages a {@link List<com.google.genai.types.ReferenceImage>} to send to use for
   *     editing. The 5 types of reference images are: {@link
   *     com.google.genai.types.RawReferenceImage}, {@link
   *     com.google.genai.types.MaskReferenceImage}, {@link
   *     com.google.genai.types.ControlReferenceImage}, {@link
   *     com.google.genai.types.StyleReferenceImage}, {@link
   *     com.google.genai.types.SubjectReferenceImage},
   * @param config a {@link com.google.genai.types.EditImageConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.EditImageResponse} instance that contains the edited
   *     image.
   */
  public CompletableFuture<EditImageResponse> editImage(
      String model, String prompt, List<ReferenceImage> referenceImages, EditImageConfig config) {
    return CompletableFuture.supplyAsync(
        () -> models.editImage(model, prompt, referenceImages, config));
  }

  /**
   * Asynchronously upscales an image given a GenAI model and an image and an upscale factor.
   *
   * @param model the name of the GenAI model to use for upscaling
   * @param image a {@link com.google.genai.types.Image} to send to the generative model
   * @param upscaleFactor the factor to upscale the image
   * @param config a {@link com.google.genai.types.UpscaleImageConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.UpscaleImageResponse} instance that contains the
   *     upscaled image.
   */
  public CompletableFuture<UpscaleImageResponse> upscaleImage(
      String model, Image image, String upscaleFactor, UpscaleImageConfig config) {
    return CompletableFuture.supplyAsync(
        () -> models.upscaleImage(model, image, upscaleFactor, config));
  }

  /**
   * Asynchronously generates videos given a GenAI model, and a GenerateVideosSource source.
   *
   * <p>This method is experimental.
   *
   * @param model the name of the GenAI model to use for generating videos
   * @param source a {@link com.google.genai.types.GenerateVideosSource} that specifies the inputs
   *     (prompt, image, and/or video) to generate videos.
   * @param config a {@link com.google.genai.types.GenerateVideosConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.GenerateVideosOperation} instance that contains the
   *     generated videos.
   */
  public CompletableFuture<GenerateVideosOperation> generateVideos(
      String model, GenerateVideosSource source, GenerateVideosConfig config) {
    return CompletableFuture.supplyAsync(() -> models.generateVideos(model, source, config));
  }

  /**
   * Asynchronously generates videos given a GenAI model, and an input (text, image, or video).
   *
   * <p>This method is experimental.
   *
   * @param model the name of the GenAI model to use for generating videos
   * @param prompt the text prompt for generating the videos. Optional for image to video and video
   *     extension use cases.
   * @param image the input image for generating the videos. Optional if prompt is provided.
   * @param video the input video for video extension use cases. Optional if prompt or image is
   *     provided.
   * @param config a {@link com.google.genai.types.GenerateVideosConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.GenerateVideosOperation} instance that contains the
   *     generated videos.
   */
  public CompletableFuture<GenerateVideosOperation> generateVideos(
      String model, String prompt, Image image, Video video, GenerateVideosConfig config) {
    return CompletableFuture.supplyAsync(
        () -> models.generateVideos(model, prompt, image, video, config));
  }

  /**
   * Asynchronously generates videos given a GenAI model, and an input (text, image).
   *
   * <p>This method is experimental, and kept for backward compatibility.
   *
   * @param model the name of the GenAI model to use for generating videos
   * @param prompt the text prompt for generating the videos. Optional for image to video use cases.
   * @param image the input image for generating the videos. Optional if prompt is provided.
   * @param config a {@link com.google.genai.types.GenerateVideosConfig} instance that specifies the
   *     optional configurations
   * @return a {@link com.google.genai.types.GenerateVideosOperation} instance that contains the
   *     generated videos.
   */
  public CompletableFuture<GenerateVideosOperation> generateVideos(
      String model, String prompt, Image image, GenerateVideosConfig config) {
    return CompletableFuture.supplyAsync(() -> models.generateVideos(model, prompt, image, config));
  }

  /**
   * Asynchronously embeds content given a GenAI model and a text string.
   *
   * @param model the name of the GenAI model to use for embedding
   * @param text the text string to send to the embedding model
   * @return a {@link com.google.genai.types.EmbedContentResponse} instance that contains the
   *     embedding.
   */
  public CompletableFuture<EmbedContentResponse> embedContent(
      String model, String text, EmbedContentConfig config) {
    return CompletableFuture.supplyAsync(() -> models.embedContent(model, text, config));
  }

  /**
   * Asynchronously embeds content given a GenAI model and a list of text strings.
   *
   * @param model the name of the GenAI model to use for embedding
   * @param texts the list of text strings to send to the embedding model
   * @return a {@link com.google.genai.types.EmbedContentResponse} instance that contains the
   *     embedding.
   */
  public CompletableFuture<EmbedContentResponse> embedContent(
      String model, List<String> texts, EmbedContentConfig config) {
    return CompletableFuture.supplyAsync(() -> models.embedContent(model, texts, config));
  }

  /**
   * Asynchronously makes an API request to list the available models.
   *
   * @param config A {@link ListModelsConfig} for configuring the list request.
   * @return A CompletableFuture that resolves to a {@link AsyncPager}. The AsyncPager has a
   *     `forEach` method that can be used to asynchronously process items in the page and
   *     automatically query the next page once the current page is exhausted.
   */
  @SuppressWarnings("PatternMatchingInstanceof")
  public CompletableFuture<AsyncPager<Model>> list(ListModelsConfig config) {
    if (config == null) {
      config = ListModelsConfig.builder().build();
    }
    // A user-supplied filter is rejected; the only filter used is the tuned-model
    // filter set internally below.
    if (config.filter().isPresent()) {
      throw new IllegalArgumentException("Filter is currently not supported for list models.");
    }
    ListModelsConfig.Builder configBuilder = config.toBuilder();
    if (!config.queryBase().isPresent()) {
      configBuilder.queryBase(true);
    } else if (!config.queryBase().get() && models.apiClient.vertexAI()) {
      // queryBase=false on Vertex AI means "list tuned models only".
      configBuilder.filter("labels.tune-type:*");
    }
    final ListModelsConfig updatedConfig = configBuilder.build();
    // Page-fetch callback handed to AsyncPager; it re-invokes privateList with the
    // per-page config the pager supplies.
    Function<JsonSerializable, CompletableFuture<JsonNode>> request =
        requestConfig -> {
          if (!(requestConfig instanceof ListModelsConfig)) {
            throw new GenAiIOException(
                "Internal error: Pager expected ListModelsConfig but received "
                    + requestConfig.getClass().getName());
          }
          return CompletableFuture.supplyAsync(
              () ->
                  JsonSerializable.toJsonNode(
                      models.privateList((ListModelsConfig) requestConfig)));
        };
    return CompletableFuture.supplyAsync(
        () ->
            new AsyncPager<>(
                Pager.PagedItem.MODELS,
                request,
                (ObjectNode) JsonSerializable.toJsonNode(updatedConfig),
                request.apply(updatedConfig)));
  }
}
apache/hbase
35,502
hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TLogQueryFilter.java
/** * Autogenerated by Thrift Compiler (0.14.1) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) /** * Thrift wrapper around * org.apache.hadoop.hbase.client.LogQueryFilter */ @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2025-08-16") public class TLogQueryFilter implements org.apache.thrift.TBase<TLogQueryFilter, TLogQueryFilter._Fields>, java.io.Serializable, Cloneable, Comparable<TLogQueryFilter> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TLogQueryFilter"); private static final org.apache.thrift.protocol.TField REGION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("regionName", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField CLIENT_ADDRESS_FIELD_DESC = new org.apache.thrift.protocol.TField("clientAddress", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField USER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("userName", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField LIMIT_FIELD_DESC = new org.apache.thrift.protocol.TField("limit", org.apache.thrift.protocol.TType.I32, (short)5); private static final org.apache.thrift.protocol.TField LOG_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("logType", org.apache.thrift.protocol.TType.I32, (short)6); private static final org.apache.thrift.protocol.TField FILTER_BY_OPERATOR_FIELD_DESC = new org.apache.thrift.protocol.TField("filterByOperator", 
org.apache.thrift.protocol.TType.I32, (short)7); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new TLogQueryFilterStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TLogQueryFilterTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String regionName; // optional public @org.apache.thrift.annotation.Nullable java.lang.String clientAddress; // optional public @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional public @org.apache.thrift.annotation.Nullable java.lang.String userName; // optional public int limit; // optional /** * * @see TLogType */ public @org.apache.thrift.annotation.Nullable TLogType logType; // optional /** * * @see TFilterByOperator */ public @org.apache.thrift.annotation.Nullable TFilterByOperator filterByOperator; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { REGION_NAME((short)1, "regionName"), CLIENT_ADDRESS((short)2, "clientAddress"), TABLE_NAME((short)3, "tableName"), USER_NAME((short)4, "userName"), LIMIT((short)5, "limit"), /** * * @see TLogType */ LOG_TYPE((short)6, "logType"), /** * * @see TFilterByOperator */ FILTER_BY_OPERATOR((short)7, "filterByOperator"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // REGION_NAME return REGION_NAME; case 2: // CLIENT_ADDRESS return CLIENT_ADDRESS; case 3: // TABLE_NAME return TABLE_NAME; case 4: // USER_NAME return USER_NAME; case 5: // LIMIT return LIMIT; case 6: // LOG_TYPE return LOG_TYPE; case 7: // FILTER_BY_OPERATOR return FILTER_BY_OPERATOR; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __LIMIT_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.REGION_NAME,_Fields.CLIENT_ADDRESS,_Fields.TABLE_NAME,_Fields.USER_NAME,_Fields.LIMIT,_Fields.LOG_TYPE,_Fields.FILTER_BY_OPERATOR}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.REGION_NAME, new org.apache.thrift.meta_data.FieldMetaData("regionName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.CLIENT_ADDRESS, new org.apache.thrift.meta_data.FieldMetaData("clientAddress", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.USER_NAME, new org.apache.thrift.meta_data.FieldMetaData("userName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.LIMIT, new org.apache.thrift.meta_data.FieldMetaData("limit", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.LOG_TYPE, new org.apache.thrift.meta_data.FieldMetaData("logType", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TLogType.class))); tmpMap.put(_Fields.FILTER_BY_OPERATOR, new org.apache.thrift.meta_data.FieldMetaData("filterByOperator", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TFilterByOperator.class))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TLogQueryFilter.class, metaDataMap); } public TLogQueryFilter() { this.limit = 10; this.logType = org.apache.hadoop.hbase.thrift2.generated.TLogType.SLOW_LOG; this.filterByOperator = org.apache.hadoop.hbase.thrift2.generated.TFilterByOperator.OR; } /** * Performs a deep copy on <i>other</i>. 
*/ public TLogQueryFilter(TLogQueryFilter other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetRegionName()) { this.regionName = other.regionName; } if (other.isSetClientAddress()) { this.clientAddress = other.clientAddress; } if (other.isSetTableName()) { this.tableName = other.tableName; } if (other.isSetUserName()) { this.userName = other.userName; } this.limit = other.limit; if (other.isSetLogType()) { this.logType = other.logType; } if (other.isSetFilterByOperator()) { this.filterByOperator = other.filterByOperator; } } public TLogQueryFilter deepCopy() { return new TLogQueryFilter(this); } @Override public void clear() { this.regionName = null; this.clientAddress = null; this.tableName = null; this.userName = null; this.limit = 10; this.logType = org.apache.hadoop.hbase.thrift2.generated.TLogType.SLOW_LOG; this.filterByOperator = org.apache.hadoop.hbase.thrift2.generated.TFilterByOperator.OR; } @org.apache.thrift.annotation.Nullable public java.lang.String getRegionName() { return this.regionName; } public TLogQueryFilter setRegionName(@org.apache.thrift.annotation.Nullable java.lang.String regionName) { this.regionName = regionName; return this; } public void unsetRegionName() { this.regionName = null; } /** Returns true if field regionName is set (has been assigned a value) and false otherwise */ public boolean isSetRegionName() { return this.regionName != null; } public void setRegionNameIsSet(boolean value) { if (!value) { this.regionName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getClientAddress() { return this.clientAddress; } public TLogQueryFilter setClientAddress(@org.apache.thrift.annotation.Nullable java.lang.String clientAddress) { this.clientAddress = clientAddress; return this; } public void unsetClientAddress() { this.clientAddress = null; } /** Returns true if field clientAddress is set (has been assigned a value) and false otherwise */ public boolean isSetClientAddress() { return 
this.clientAddress != null; } public void setClientAddressIsSet(boolean value) { if (!value) { this.clientAddress = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getTableName() { return this.tableName; } public TLogQueryFilter setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) { this.tableName = tableName; return this; } public void unsetTableName() { this.tableName = null; } /** Returns true if field tableName is set (has been assigned a value) and false otherwise */ public boolean isSetTableName() { return this.tableName != null; } public void setTableNameIsSet(boolean value) { if (!value) { this.tableName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getUserName() { return this.userName; } public TLogQueryFilter setUserName(@org.apache.thrift.annotation.Nullable java.lang.String userName) { this.userName = userName; return this; } public void unsetUserName() { this.userName = null; } /** Returns true if field userName is set (has been assigned a value) and false otherwise */ public boolean isSetUserName() { return this.userName != null; } public void setUserNameIsSet(boolean value) { if (!value) { this.userName = null; } } public int getLimit() { return this.limit; } public TLogQueryFilter setLimit(int limit) { this.limit = limit; setLimitIsSet(true); return this; } public void unsetLimit() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __LIMIT_ISSET_ID); } /** Returns true if field limit is set (has been assigned a value) and false otherwise */ public boolean isSetLimit() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __LIMIT_ISSET_ID); } public void setLimitIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __LIMIT_ISSET_ID, value); } /** * * @see TLogType */ @org.apache.thrift.annotation.Nullable public TLogType getLogType() { return this.logType; } /** * * @see TLogType */ 
public TLogQueryFilter setLogType(@org.apache.thrift.annotation.Nullable TLogType logType) { this.logType = logType; return this; } public void unsetLogType() { this.logType = null; } /** Returns true if field logType is set (has been assigned a value) and false otherwise */ public boolean isSetLogType() { return this.logType != null; } public void setLogTypeIsSet(boolean value) { if (!value) { this.logType = null; } } /** * * @see TFilterByOperator */ @org.apache.thrift.annotation.Nullable public TFilterByOperator getFilterByOperator() { return this.filterByOperator; } /** * * @see TFilterByOperator */ public TLogQueryFilter setFilterByOperator(@org.apache.thrift.annotation.Nullable TFilterByOperator filterByOperator) { this.filterByOperator = filterByOperator; return this; } public void unsetFilterByOperator() { this.filterByOperator = null; } /** Returns true if field filterByOperator is set (has been assigned a value) and false otherwise */ public boolean isSetFilterByOperator() { return this.filterByOperator != null; } public void setFilterByOperatorIsSet(boolean value) { if (!value) { this.filterByOperator = null; } } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case REGION_NAME: if (value == null) { unsetRegionName(); } else { setRegionName((java.lang.String)value); } break; case CLIENT_ADDRESS: if (value == null) { unsetClientAddress(); } else { setClientAddress((java.lang.String)value); } break; case TABLE_NAME: if (value == null) { unsetTableName(); } else { setTableName((java.lang.String)value); } break; case USER_NAME: if (value == null) { unsetUserName(); } else { setUserName((java.lang.String)value); } break; case LIMIT: if (value == null) { unsetLimit(); } else { setLimit((java.lang.Integer)value); } break; case LOG_TYPE: if (value == null) { unsetLogType(); } else { setLogType((TLogType)value); } break; case FILTER_BY_OPERATOR: if (value == null) { unsetFilterByOperator(); 
} else { setFilterByOperator((TFilterByOperator)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case REGION_NAME: return getRegionName(); case CLIENT_ADDRESS: return getClientAddress(); case TABLE_NAME: return getTableName(); case USER_NAME: return getUserName(); case LIMIT: return getLimit(); case LOG_TYPE: return getLogType(); case FILTER_BY_OPERATOR: return getFilterByOperator(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case REGION_NAME: return isSetRegionName(); case CLIENT_ADDRESS: return isSetClientAddress(); case TABLE_NAME: return isSetTableName(); case USER_NAME: return isSetUserName(); case LIMIT: return isSetLimit(); case LOG_TYPE: return isSetLogType(); case FILTER_BY_OPERATOR: return isSetFilterByOperator(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof TLogQueryFilter) return this.equals((TLogQueryFilter)that); return false; } public boolean equals(TLogQueryFilter that) { if (that == null) return false; if (this == that) return true; boolean this_present_regionName = true && this.isSetRegionName(); boolean that_present_regionName = true && that.isSetRegionName(); if (this_present_regionName || that_present_regionName) { if (!(this_present_regionName && that_present_regionName)) return false; if (!this.regionName.equals(that.regionName)) return false; } boolean this_present_clientAddress = true && this.isSetClientAddress(); boolean that_present_clientAddress = true && that.isSetClientAddress(); if (this_present_clientAddress || that_present_clientAddress) { if (!(this_present_clientAddress && that_present_clientAddress)) return false; if 
(!this.clientAddress.equals(that.clientAddress)) return false; } boolean this_present_tableName = true && this.isSetTableName(); boolean that_present_tableName = true && that.isSetTableName(); if (this_present_tableName || that_present_tableName) { if (!(this_present_tableName && that_present_tableName)) return false; if (!this.tableName.equals(that.tableName)) return false; } boolean this_present_userName = true && this.isSetUserName(); boolean that_present_userName = true && that.isSetUserName(); if (this_present_userName || that_present_userName) { if (!(this_present_userName && that_present_userName)) return false; if (!this.userName.equals(that.userName)) return false; } boolean this_present_limit = true && this.isSetLimit(); boolean that_present_limit = true && that.isSetLimit(); if (this_present_limit || that_present_limit) { if (!(this_present_limit && that_present_limit)) return false; if (this.limit != that.limit) return false; } boolean this_present_logType = true && this.isSetLogType(); boolean that_present_logType = true && that.isSetLogType(); if (this_present_logType || that_present_logType) { if (!(this_present_logType && that_present_logType)) return false; if (!this.logType.equals(that.logType)) return false; } boolean this_present_filterByOperator = true && this.isSetFilterByOperator(); boolean that_present_filterByOperator = true && that.isSetFilterByOperator(); if (this_present_filterByOperator || that_present_filterByOperator) { if (!(this_present_filterByOperator && that_present_filterByOperator)) return false; if (!this.filterByOperator.equals(that.filterByOperator)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetRegionName()) ? 131071 : 524287); if (isSetRegionName()) hashCode = hashCode * 8191 + regionName.hashCode(); hashCode = hashCode * 8191 + ((isSetClientAddress()) ? 
131071 : 524287); if (isSetClientAddress()) hashCode = hashCode * 8191 + clientAddress.hashCode(); hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287); if (isSetTableName()) hashCode = hashCode * 8191 + tableName.hashCode(); hashCode = hashCode * 8191 + ((isSetUserName()) ? 131071 : 524287); if (isSetUserName()) hashCode = hashCode * 8191 + userName.hashCode(); hashCode = hashCode * 8191 + ((isSetLimit()) ? 131071 : 524287); if (isSetLimit()) hashCode = hashCode * 8191 + limit; hashCode = hashCode * 8191 + ((isSetLogType()) ? 131071 : 524287); if (isSetLogType()) hashCode = hashCode * 8191 + logType.getValue(); hashCode = hashCode * 8191 + ((isSetFilterByOperator()) ? 131071 : 524287); if (isSetFilterByOperator()) hashCode = hashCode * 8191 + filterByOperator.getValue(); return hashCode; } @Override public int compareTo(TLogQueryFilter other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetRegionName(), other.isSetRegionName()); if (lastComparison != 0) { return lastComparison; } if (isSetRegionName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.regionName, other.regionName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetClientAddress(), other.isSetClientAddress()); if (lastComparison != 0) { return lastComparison; } if (isSetClientAddress()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.clientAddress, other.clientAddress); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTableName(), other.isSetTableName()); if (lastComparison != 0) { return lastComparison; } if (isSetTableName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
java.lang.Boolean.compare(isSetUserName(), other.isSetUserName()); if (lastComparison != 0) { return lastComparison; } if (isSetUserName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userName, other.userName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetLimit(), other.isSetLimit()); if (lastComparison != 0) { return lastComparison; } if (isSetLimit()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.limit, other.limit); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetLogType(), other.isSetLogType()); if (lastComparison != 0) { return lastComparison; } if (isSetLogType()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.logType, other.logType); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetFilterByOperator(), other.isSetFilterByOperator()); if (lastComparison != 0) { return lastComparison; } if (isSetFilterByOperator()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterByOperator, other.filterByOperator); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("TLogQueryFilter("); boolean first = true; if (isSetRegionName()) { sb.append("regionName:"); if (this.regionName == null) { sb.append("null"); } else { sb.append(this.regionName); } first = false; } if (isSetClientAddress()) { if (!first) sb.append(", "); 
sb.append("clientAddress:"); if (this.clientAddress == null) { sb.append("null"); } else { sb.append(this.clientAddress); } first = false; } if (isSetTableName()) { if (!first) sb.append(", "); sb.append("tableName:"); if (this.tableName == null) { sb.append("null"); } else { sb.append(this.tableName); } first = false; } if (isSetUserName()) { if (!first) sb.append(", "); sb.append("userName:"); if (this.userName == null) { sb.append("null"); } else { sb.append(this.userName); } first = false; } if (isSetLimit()) { if (!first) sb.append(", "); sb.append("limit:"); sb.append(this.limit); first = false; } if (isSetLogType()) { if (!first) sb.append(", "); sb.append("logType:"); if (this.logType == null) { sb.append("null"); } else { sb.append(this.logType); } first = false; } if (isSetFilterByOperator()) { if (!first) sb.append(", "); sb.append("filterByOperator:"); if (this.filterByOperator == null) { sb.append("null"); } else { sb.append(this.filterByOperator); } first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TLogQueryFilterStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TLogQueryFilterStandardScheme getScheme() { return new TLogQueryFilterStandardScheme(); } } private static class TLogQueryFilterStandardScheme extends org.apache.thrift.scheme.StandardScheme<TLogQueryFilter> { public void read(org.apache.thrift.protocol.TProtocol iprot, TLogQueryFilter struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // REGION_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.regionName = iprot.readString(); struct.setRegionNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // CLIENT_ADDRESS if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.clientAddress = iprot.readString(); struct.setClientAddressIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // TABLE_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.tableName = iprot.readString(); struct.setTableNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // USER_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.userName = iprot.readString(); struct.setUserNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // LIMIT if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.limit = 
iprot.readI32(); struct.setLimitIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // LOG_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.logType = org.apache.hadoop.hbase.thrift2.generated.TLogType.findByValue(iprot.readI32()); struct.setLogTypeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // FILTER_BY_OPERATOR if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.filterByOperator = org.apache.hadoop.hbase.thrift2.generated.TFilterByOperator.findByValue(iprot.readI32()); struct.setFilterByOperatorIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TLogQueryFilter struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.regionName != null) { if (struct.isSetRegionName()) { oprot.writeFieldBegin(REGION_NAME_FIELD_DESC); oprot.writeString(struct.regionName); oprot.writeFieldEnd(); } } if (struct.clientAddress != null) { if (struct.isSetClientAddress()) { oprot.writeFieldBegin(CLIENT_ADDRESS_FIELD_DESC); oprot.writeString(struct.clientAddress); oprot.writeFieldEnd(); } } if (struct.tableName != null) { if (struct.isSetTableName()) { oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC); oprot.writeString(struct.tableName); oprot.writeFieldEnd(); } } if (struct.userName != null) { if (struct.isSetUserName()) { oprot.writeFieldBegin(USER_NAME_FIELD_DESC); oprot.writeString(struct.userName); oprot.writeFieldEnd(); } } if (struct.isSetLimit()) { oprot.writeFieldBegin(LIMIT_FIELD_DESC); 
oprot.writeI32(struct.limit); oprot.writeFieldEnd(); } if (struct.logType != null) { if (struct.isSetLogType()) { oprot.writeFieldBegin(LOG_TYPE_FIELD_DESC); oprot.writeI32(struct.logType.getValue()); oprot.writeFieldEnd(); } } if (struct.filterByOperator != null) { if (struct.isSetFilterByOperator()) { oprot.writeFieldBegin(FILTER_BY_OPERATOR_FIELD_DESC); oprot.writeI32(struct.filterByOperator.getValue()); oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TLogQueryFilterTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TLogQueryFilterTupleScheme getScheme() { return new TLogQueryFilterTupleScheme(); } } private static class TLogQueryFilterTupleScheme extends org.apache.thrift.scheme.TupleScheme<TLogQueryFilter> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TLogQueryFilter struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetRegionName()) { optionals.set(0); } if (struct.isSetClientAddress()) { optionals.set(1); } if (struct.isSetTableName()) { optionals.set(2); } if (struct.isSetUserName()) { optionals.set(3); } if (struct.isSetLimit()) { optionals.set(4); } if (struct.isSetLogType()) { optionals.set(5); } if (struct.isSetFilterByOperator()) { optionals.set(6); } oprot.writeBitSet(optionals, 7); if (struct.isSetRegionName()) { oprot.writeString(struct.regionName); } if (struct.isSetClientAddress()) { oprot.writeString(struct.clientAddress); } if (struct.isSetTableName()) { oprot.writeString(struct.tableName); } if (struct.isSetUserName()) { oprot.writeString(struct.userName); } if (struct.isSetLimit()) { oprot.writeI32(struct.limit); } if (struct.isSetLogType()) { oprot.writeI32(struct.logType.getValue()); } if (struct.isSetFilterByOperator()) { oprot.writeI32(struct.filterByOperator.getValue()); } } 
// NOTE(review): this is Thrift-compiler-generated code (struct TLogQueryFilter).
// Do not hand-edit; regenerate from the .thrift IDL. Comments added for review readability only.
/**
 * Tuple-scheme deserializer for TLogQueryFilter. Mirrors the companion write()
 * above: first a 7-entry bit set flags which optional fields are present on the
 * wire, then the present field values are read back in field-id order.
 */
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, TLogQueryFilter struct) throws org.apache.thrift.TException {
  org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
  // Bit i set => optional field with id i+1 was serialized; ordering is fixed by write():
  // regionName, clientAddress, tableName, userName, limit, logType, filterByOperator.
  java.util.BitSet incoming = iprot.readBitSet(7);
  if (incoming.get(0)) {
    struct.regionName = iprot.readString();
    struct.setRegionNameIsSet(true);
  }
  if (incoming.get(1)) {
    struct.clientAddress = iprot.readString();
    struct.setClientAddressIsSet(true);
  }
  if (incoming.get(2)) {
    struct.tableName = iprot.readString();
    struct.setTableNameIsSet(true);
  }
  if (incoming.get(3)) {
    struct.userName = iprot.readString();
    struct.setUserNameIsSet(true);
  }
  if (incoming.get(4)) {
    struct.limit = iprot.readI32();
    struct.setLimitIsSet(true);
  }
  if (incoming.get(5)) {
    // Enums travel as I32. findByValue presumably returns null for wire values not in the
    // local TLogType enum (standard Thrift convention) — TODO confirm; if so, isSetLogType()
    // subsequently reports false, since it checks the field for null.
    struct.logType = org.apache.hadoop.hbase.thrift2.generated.TLogType.findByValue(iprot.readI32());
    struct.setLogTypeIsSet(true);
  }
  if (incoming.get(6)) {
    // Same null-on-unknown caveat as logType above.
    struct.filterByOperator = org.apache.hadoop.hbase.thrift2.generated.TFilterByOperator.findByValue(iprot.readI32());
    struct.setFilterByOperatorIsSet(true);
  }
}
} // end TLogQueryFilterTupleScheme

/**
 * Picks the serialization scheme matching the given protocol: the standard
 * (field-tagged) scheme when the protocol reports StandardScheme, otherwise
 * the compact tuple scheme.
 */
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
} // end TLogQueryFilter (class header is above this chunk)
apache/systemds
34,029
src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/FederatedPSControlThread.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.controlprogram.paramserv; import static org.apache.sysds.runtime.util.ProgramConverter.NEWLINE; import static org.apache.sysds.runtime.util.ProgramConverter.PROG_BEGIN; import static org.apache.sysds.runtime.util.ProgramConverter.PROG_END; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.sysds.api.DMLScript; import org.apache.sysds.parser.DataIdentifier; import org.apache.sysds.parser.Statement; import org.apache.sysds.parser.Statement.PSFrequency; import org.apache.sysds.parser.Statement.PSRuntimeBalancing; import org.apache.sysds.runtime.DMLRuntimeException; import org.apache.sysds.runtime.controlprogram.BasicProgramBlock; import org.apache.sysds.runtime.controlprogram.FunctionProgramBlock; import org.apache.sysds.runtime.controlprogram.ProgramBlock; import 
org.apache.sysds.runtime.controlprogram.caching.MatrixObject; import org.apache.sysds.runtime.controlprogram.context.ExecutionContext; import org.apache.sysds.runtime.controlprogram.federated.FederatedData; import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest; import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest.RequestType; import org.apache.sysds.runtime.controlprogram.federated.FederatedResponse; import org.apache.sysds.runtime.controlprogram.federated.FederatedUDF; import org.apache.sysds.runtime.controlprogram.federated.FederationUtils; import org.apache.sysds.runtime.controlprogram.paramserv.homomorphicEncryption.PublicKey; import org.apache.sysds.runtime.controlprogram.paramserv.homomorphicEncryption.SEALClient; import org.apache.sysds.runtime.functionobjects.Multiply; import org.apache.sysds.runtime.instructions.Instruction; import org.apache.sysds.runtime.instructions.InstructionUtils; import org.apache.sysds.runtime.instructions.cp.BooleanObject; import org.apache.sysds.runtime.instructions.cp.CPOperand; import org.apache.sysds.runtime.instructions.cp.CiphertextMatrix; import org.apache.sysds.runtime.instructions.cp.Data; import org.apache.sysds.runtime.instructions.cp.DoubleObject; import org.apache.sysds.runtime.instructions.cp.FunctionCallCPInstruction; import org.apache.sysds.runtime.instructions.cp.IntObject; import org.apache.sysds.runtime.instructions.cp.ListObject; import org.apache.sysds.runtime.instructions.cp.PlaintextMatrix; import org.apache.sysds.runtime.instructions.cp.StringObject; import org.apache.sysds.runtime.lineage.LineageItem; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.operators.RightScalarOperator; import org.apache.sysds.runtime.util.ProgramConverter; import org.apache.sysds.utils.stats.ParamServStatistics; import org.apache.sysds.utils.stats.Timing; public class FederatedPSControlThread extends PSWorker implements Callable<Void> { 
private static final long serialVersionUID = 6846648059569648791L; protected static final Log LOG = LogFactory.getLog(ParamServer.class.getName()); private FederatedData _featuresData; private FederatedData _labelsData; private final long _modelVarID; // runtime balancing private final PSRuntimeBalancing _runtimeBalancing; private int _numBatchesPerEpoch; private int _numBatchesPerNbatch; private int _possibleBatchesPerLocalEpoch; private final boolean _weighting; private double _weightingFactor = 1; private boolean _cycleStartAt0 = false; private boolean _use_homomorphic_encryption = false; private PublicKey _partial_public_key; public FederatedPSControlThread(int workerID, String updFunc, Statement.PSFrequency freq, PSRuntimeBalancing runtimeBalancing, boolean weighting, int epochs, long batchSize, int numBatchesPerGlobalEpoch, ExecutionContext ec, ParamServer ps, int nbatches, boolean modelAvg, boolean use_homomorphic_encryption) { super(workerID, updFunc, freq, epochs, batchSize, ec, ps, nbatches, modelAvg); _numBatchesPerEpoch = numBatchesPerGlobalEpoch; _runtimeBalancing = runtimeBalancing; _weighting = weighting && (!use_homomorphic_encryption); // FIXME: this disables weighting in favor of homomorphic encryption _numBatchesPerNbatch = nbatches; // generate the ID for the model _modelVarID = FederationUtils.getNextFedDataID(); _modelAvg = _use_homomorphic_encryption || modelAvg; // we always have to use modelAvg when using homomorphic encryption _use_homomorphic_encryption = use_homomorphic_encryption; } /** * Sets up the federated worker and control thread * * @param weightingFactor Gradients from this worker will be multiplied by this factor if weighting is enabled */ public void setup(double weightingFactor) { incWorkerNumber(); if (_use_homomorphic_encryption) { ((HEParamServer)_ps).registerThread(_workerID, this); } // prepare features and labels _featuresData = _features.getFedMapping().getFederatedData()[0]; _labelsData = 
_labels.getFedMapping().getFederatedData()[0]; // weighting factor is always set, but only used when weighting is specified _weightingFactor = weightingFactor; // different runtime balancing calculations long dataSize = _features.getNumRows(); // calculate scaled batch size if balancing via batch size. // In some cases there will be some cycling if(_runtimeBalancing == PSRuntimeBalancing.SCALE_BATCH) _batchSize = (int) Math.ceil((double) dataSize / _numBatchesPerEpoch); // Calculate possible batches with batch size _possibleBatchesPerLocalEpoch = (int) Math.ceil((double) dataSize / _batchSize); // If no runtime balancing is specified, just run possible number of batches // WARNING: Will get stuck on miss match if(_runtimeBalancing == PSRuntimeBalancing.NONE) _numBatchesPerEpoch = _possibleBatchesPerLocalEpoch; // If running in baseline mode set cycle to false if(_runtimeBalancing == PSRuntimeBalancing.BASELINE) _cycleStartAt0 = true; if( LOG.isInfoEnabled() ) { LOG.info("Setup config for worker " + this.getWorkerName()); LOG.info("Batch size: " + _batchSize + " possible batches: " + _possibleBatchesPerLocalEpoch + " batches to run: " + _numBatchesPerEpoch + " weighting factor: " + _weightingFactor); } // serialize program // create program blocks for the instruction filtering String programSerialized; ArrayList<ProgramBlock> pbs = new ArrayList<>(); BasicProgramBlock gradientProgramBlock = new BasicProgramBlock(_ec.getProgram()); gradientProgramBlock.setInstructions(new ArrayList<>(Collections.singletonList(_inst))); pbs.add(gradientProgramBlock); if(_freq == PSFrequency.EPOCH || _freq == PSFrequency.NBATCHES) { BasicProgramBlock aggProgramBlock = new BasicProgramBlock(_ec.getProgram()); aggProgramBlock.setInstructions(new ArrayList<>(Collections.singletonList(_ps.getAggInst()))); pbs.add(aggProgramBlock); } programSerialized = InstructionUtils.concatStrings( PROG_BEGIN, NEWLINE, ProgramConverter.serializeProgram(_ec.getProgram(), pbs, new HashMap<>()), PROG_END); 
// write program and meta data to worker Future<FederatedResponse> udfResponse; final SetupFederatedWorker udf; if (_use_homomorphic_encryption) { byte[] a = ((HEParamServer)_ps).generateA(); // generate pk[i] on each client and return it udf = new SetupHEFederatedWorker(a); } else { udf = new SetupFederatedWorker(); } udf.setParams(_batchSize, dataSize, _possibleBatchesPerLocalEpoch, programSerialized, _inst.getNamespace(), _inst.getFunctionName(), _ps.getAggInst().getFunctionName(), _ec.getListObject("hyperparams"), _modelVarID, _nbatches, _use_homomorphic_encryption || _modelAvg); udfResponse = _featuresData.executeFederatedOperation( new FederatedRequest(RequestType.EXEC_UDF, _featuresData.getVarID(), udf)); FederatedResponse response; try { response = udfResponse.get(); if(!response.isSuccessful()) throw new DMLRuntimeException("FederatedLocalPSThread: Setup UDF failed"); } catch(Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute Setup UDF" + e.getMessage()); } if (_use_homomorphic_encryption) { try { _partial_public_key = (PublicKey) response.getData()[0]; } catch (Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: HE Setup UDF didn't return an object"); } } } /** * cleans up the execution context of the federated worker */ public void teardown() { // write program and meta data to worker Future<FederatedResponse> udfResponse = _featuresData.executeFederatedOperation( new FederatedRequest(RequestType.EXEC_UDF, _featuresData.getVarID(), new TeardownFederatedWorker() )); try { FederatedResponse response = udfResponse.get(); if(!response.isSuccessful()) throw new DMLRuntimeException("FederatedLocalPSThread: Teardown UDF failed"); } catch(Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute Teardown UDF" + e.getMessage()); } } /** * Setup UDF executed on the federated worker */ private static class SetupFederatedWorker extends FederatedUDF { private static final long 
serialVersionUID = -3148991224792675607L; private long _batchSize; private long _dataSize; private int _possibleBatchesPerLocalEpoch; private String _programString; private String _namespace; private String _gradientsFunctionName; private String _aggregationFunctionName; private ListObject _hyperParams; private long _modelVarID; private boolean _modelAvg; private int _nbatches; private boolean _params_set = false; protected SetupFederatedWorker() { super(new long[]{}); } public void setParams(long batchSize, long dataSize, int possibleBatchesPerLocalEpoch, String programString, String namespace, String gradientsFunctionName, String aggregationFunctionName, ListObject hyperParams, long modelVarID, int nbatches, boolean modelAvg) { _batchSize = batchSize; _dataSize = dataSize; _possibleBatchesPerLocalEpoch = possibleBatchesPerLocalEpoch; _programString = programString; _namespace = namespace; _gradientsFunctionName = gradientsFunctionName; _aggregationFunctionName = aggregationFunctionName; _hyperParams = hyperParams; _modelVarID = modelVarID; _modelAvg = modelAvg; _nbatches = nbatches; _params_set = true; } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) { if (!_params_set) { return new FederatedResponse(FederatedResponse.ResponseType.ERROR, "params were not set"); } // parse and set program ec.setProgram(ProgramConverter.parseProgram(_programString, 0)); // set variables to ec ec.setVariable(Statement.PS_FED_BATCH_SIZE, new IntObject(_batchSize)); ec.setVariable(Statement.PS_FED_DATA_SIZE, new IntObject(_dataSize)); ec.setVariable(Statement.PS_FED_POSS_BATCHES_LOCAL, new IntObject(_possibleBatchesPerLocalEpoch)); ec.setVariable(Statement.PS_FED_NAMESPACE, new StringObject(_namespace)); ec.setVariable(Statement.PS_FED_GRADIENTS_FNAME, new StringObject(_gradientsFunctionName)); ec.setVariable(Statement.PS_FED_AGGREGATION_FNAME, new StringObject(_aggregationFunctionName)); ec.setVariable(Statement.PS_HYPER_PARAMS, _hyperParams); ec.setVariable(Statement.PS_FED_MODEL_VARID, new IntObject(_modelVarID)); ec.setVariable(Statement.PS_NBATCHES, new IntObject(_nbatches)); ec.setVariable(Statement.PS_MODELAVG, new BooleanObject(_modelAvg)); return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } private static class SetupHEFederatedWorker extends SetupFederatedWorker { private static final long serialVersionUID = 9128347291804980123L; byte[] _partial_pubkey_a; protected SetupHEFederatedWorker(byte[] partial_pubkey_a) { // delegate everything to parent class. set modelAvg to true, as it is the only supported case super(); _partial_pubkey_a = partial_pubkey_a; } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) { // TODO: set other CKKS parameters // TODO generate partial public key NativeHEHelper.initialize(); SEALClient sc = new SEALClient(_partial_pubkey_a); ec.setSealClient(sc); PublicKey partial_pubkey = sc.generatePartialPublicKey(); FederatedResponse res = super.execute(ec, data); if (!res.isSuccessful()) { return res; } return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, partial_pubkey); } } /** * Teardown UDF executed on the federated worker */ private static class SetPublicKeyFederatedWorker extends FederatedUDF { private static final long serialVersionUID = -1536502123123318969L; private final PublicKey _public_key; protected SetPublicKeyFederatedWorker(PublicKey public_key) { super(new long[]{}); _public_key = public_key; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { ec.getSealClient().setPublicKey(_public_key); return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } private static class TeardownFederatedWorker extends FederatedUDF { private static final long serialVersionUID = -153650281873318969L; protected TeardownFederatedWorker() { super(new long[]{}); } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) {
	// remove all PS bookkeeping variables that setup() placed into the worker's
	// execution context (hyper-parameter list is cleaned up incl. its data)
	ec.removeVariable(Statement.PS_FED_BATCH_SIZE);
	ec.removeVariable(Statement.PS_FED_DATA_SIZE);
	ec.removeVariable(Statement.PS_FED_POSS_BATCHES_LOCAL);
	ec.removeVariable(Statement.PS_FED_NAMESPACE);
	ec.removeVariable(Statement.PS_FED_GRADIENTS_FNAME);
	ec.removeVariable(Statement.PS_FED_AGGREGATION_FNAME);
	ec.removeVariable(Statement.PS_FED_MODEL_VARID);
	ParamservUtils.cleanupListObject(ec, Statement.PS_HYPER_PARAMS);
	return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS);
}

@Override
public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) {
	// lineage tracing is not supported for this UDF
	return null;
}
}

/**
 * Entry point of the worker: dispatches to the compute loop matching the
 * configured update frequency and tears the remote worker down afterwards.
 *
 * @return always {@code null} (Void)
 * @throws Exception in case the execution fails
 */
@Override
public Void call() throws Exception {
	try {
		switch (_freq) {
			case BATCH:
				computeWithBatchUpdates();
				break;
			case NBATCHES:
				computeWithNBatchUpdates();
				break;
			case EPOCH:
				computeWithEpochUpdates();
				break;
			default:
				throw new DMLRuntimeException(String.format("%s not support update frequency %s", getWorkerName(), _freq));
		}
	} catch (Exception e) {
		throw new DMLRuntimeException(String.format("%s failed", getWorkerName()), e);
	}
	// NOTE: teardown only runs on successful completion; on failure the remote
	// worker's context is left as-is (intentional or not — confirm upstream)
	teardown();
	return null;
}

// Pulls the current global model from the parameter server for this worker.
protected ListObject pullModel() {
	// Pull the global parameters from ps
	return _ps.pull(_workerID);
}

/**
 * Optionally scales the gradients by the per-worker weighting factor and
 * pushes them to the parameter server. Weighting and HE are mutually exclusive
 * (scaling encrypted matrices is not supported).
 */
protected void weightAndPushGradients(ListObject gradients) {
	assert (!(_weighting && _use_homomorphic_encryption)) : "weights and homomorphic encryption are not supported together";
	// scale gradients - must only include MatrixObjects
	if(_weighting && _weightingFactor != 1) {
		Timing tWeighting = DMLScript.STATISTICS ?
new Timing(true) : null; gradients.getData().parallelStream().forEach((matrix) -> { MatrixObject matrixObject = (MatrixObject) matrix; MatrixBlock input = matrixObject.acquireReadAndRelease().scalarOperations( new RightScalarOperator(Multiply.getMultiplyFnObject(), _weightingFactor), new MatrixBlock()); matrixObject.acquireModify(input); matrixObject.release(); }); accFedPSGradientWeightingTime(tWeighting); } // Push the gradients to ps _ps.push(_workerID, gradients); } protected static int getNextLocalBatchNum(int currentLocalBatchNumber, int possibleBatchesPerLocalEpoch) { return currentLocalBatchNumber % possibleBatchesPerLocalEpoch; } /** * Computes all epochs and updates after each batch */ protected void computeWithBatchUpdates() { for (int epochCounter = 0; epochCounter < _epochs; epochCounter++) { int currentLocalBatchNumber = (_cycleStartAt0) ? 0 : _numBatchesPerEpoch * epochCounter % _possibleBatchesPerLocalEpoch; for (int batchCounter = 0; batchCounter < _numBatchesPerEpoch; batchCounter++) { int localStartBatchNum = getNextLocalBatchNum(currentLocalBatchNumber++, _possibleBatchesPerLocalEpoch); ListObject model = pullModel(); ListObject gradients = computeGradientsForNBatches(model, 1, localStartBatchNum); Timing tAgg = DMLScript.STATISTICS ? new Timing(true) : null; if (_modelAvg && !_use_homomorphic_encryption) // we can't call the agg fn if we use HE, because it is implemented homomorphically in SEALServer::aggregateCiphertexts model = _ps.updateLocalModel(_ec, gradients, model); else ParamservUtils.cleanupListObject(model); weightAndPushGradients((_modelAvg && !_use_homomorphic_encryption) ? 
model : gradients); if (tAgg != null) { ParamServStatistics.accFedAggregation((long)tAgg.stop()); } ParamservUtils.cleanupListObject(gradients); } } } /** * Computes all epochs and updates after N batches */ protected void computeWithNBatchUpdates() { int numSetsPerEpocNbatches = (int) Math.ceil((double)_numBatchesPerEpoch / _numBatchesPerNbatch); for (int epochCounter = 0; epochCounter < _epochs; epochCounter++) { int currentLocalBatchNumber = (_cycleStartAt0) ? 0 : _numBatchesPerEpoch * epochCounter % _possibleBatchesPerLocalEpoch; for (int batchCounter = 0; batchCounter < numSetsPerEpocNbatches; batchCounter++) { int localStartBatchNum = getNextLocalBatchNum(currentLocalBatchNumber, numSetsPerEpocNbatches); currentLocalBatchNumber = currentLocalBatchNumber + _numBatchesPerNbatch; ListObject model = pullModel(); ListObject gradients = computeGradientsForNBatches(model, _numBatchesPerNbatch, localStartBatchNum, true); Timing tAgg = DMLScript.STATISTICS ? new Timing(true) : null; weightAndPushGradients(gradients); if (tAgg != null) { ParamServStatistics.accFedAggregation((long)tAgg.stop()); } ParamservUtils.cleanupListObject(model); ParamservUtils.cleanupListObject(gradients); } } } /** * Computes all epochs and updates after each epoch */ protected void computeWithEpochUpdates() { for (int epochCounter = 0; epochCounter < _epochs; epochCounter++) { int localStartBatchNum = (_cycleStartAt0) ? 0 : _numBatchesPerEpoch * epochCounter % _possibleBatchesPerLocalEpoch; // Pull the global parameters from ps ListObject model = pullModel(); ListObject gradients = computeGradientsForNBatches(model, _numBatchesPerEpoch, localStartBatchNum, true); Timing tAgg = DMLScript.STATISTICS ? 
new Timing(true) : null; weightAndPushGradients(gradients); if (tAgg != null) { ParamServStatistics.accFedAggregation((long)tAgg.stop()); } ParamservUtils.cleanupListObject(model); ParamservUtils.cleanupListObject(gradients); } } protected ListObject computeGradientsForNBatches(ListObject model, int numBatchesToCompute, int localStartBatchNum) { return computeGradientsForNBatches(model, numBatchesToCompute, localStartBatchNum, false); } /** * Computes the gradients of n batches on the federated worker and is able to update the model local. * Returns the gradients. * * @param model the current model from the parameter server * @param localStartBatchNum the batch to start from * @param localUpdate whether to update the model locally * * @return the gradient vector */ protected ListObject computeGradientsForNBatches(ListObject model, int numBatchesToCompute, int localStartBatchNum, boolean localUpdate) { Timing tFedCommunication = DMLScript.STATISTICS ? new Timing(true) : null; // put current model on federated worker Future<FederatedResponse> putParamsResponse = _featuresData.executeFederatedOperation( new FederatedRequest(RequestType.PUT_VAR, _modelVarID, model)); try { if(!putParamsResponse.get().isSuccessful()) throw new DMLRuntimeException("FederatedLocalPSThread: put was not successful"); } catch(Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute put" + e.getMessage()); } // create and execute the udf on the remote worker Object udf; if (_use_homomorphic_encryption) { udf = new HEComputeGradientsForNBatches(new long[]{_featuresData.getVarID(), _labelsData.getVarID()}, new long[]{_modelVarID}, numBatchesToCompute, localUpdate, localStartBatchNum); } else { udf = new federatedComputeGradientsForNBatches(new long[]{_featuresData.getVarID(), _labelsData.getVarID(), _modelVarID}, numBatchesToCompute, localUpdate, localStartBatchNum); } Future<FederatedResponse> udfResponse = _featuresData.executeFederatedOperation( new 
FederatedRequest(RequestType.EXEC_UDF, _featuresData.getVarID(), udf)); try { Object[] responseData = udfResponse.get().getData(); if(tFedCommunication != null) { long total = (long) tFedCommunication.stop(); long workerComputing = ((DoubleObject) responseData[1]).getLongValue(); ParamServStatistics.accFedWorkerComputing(workerComputing); ParamServStatistics.accFedCommunicationTime(total - workerComputing); ParamServStatistics.accFedNetworkTime(total); } return (ListObject) responseData[0]; } catch(Exception e) { if(tFedCommunication != null) tFedCommunication.stop(); throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute UDF" + e.getMessage(), e); } } /** * This is the code that will be executed on the federated Worker when computing one gradients for n batches */ private static class federatedComputeGradientsForNBatches extends FederatedUDF { private static final long serialVersionUID = -3075901536748794832L; int _numBatchesToCompute; boolean _localUpdate; int _localStartBatchNum; protected federatedComputeGradientsForNBatches(long[] inIDs, int numBatchesToCompute, boolean localUpdate, int localStartBatchNum) { super(inIDs); _numBatchesToCompute = numBatchesToCompute; _localUpdate = localUpdate; _localStartBatchNum = localStartBatchNum; } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) { Timing tGradients = new Timing(true); // read in data by varid MatrixObject features = (MatrixObject) data[0]; MatrixObject labels = (MatrixObject) data[1]; ListObject model = (ListObject) data[2]; // get data from execution context long batchSize = ((IntObject) ec.getVariable(Statement.PS_FED_BATCH_SIZE)).getLongValue(); long dataSize = ((IntObject) ec.getVariable(Statement.PS_FED_DATA_SIZE)).getLongValue(); int possibleBatchesPerLocalEpoch = (int) ((IntObject) ec.getVariable(Statement.PS_FED_POSS_BATCHES_LOCAL)).getLongValue(); String namespace = ((StringObject) ec.getVariable(Statement.PS_FED_NAMESPACE)).getStringValue(); String gradientsFunc = ((StringObject) ec.getVariable(Statement.PS_FED_GRADIENTS_FNAME)).getStringValue(); String aggFunc = ((StringObject) ec.getVariable(Statement.PS_FED_AGGREGATION_FNAME)).getStringValue(); boolean modelAvg = ((BooleanObject) ec.getVariable(Statement.PS_MODELAVG)).getBooleanValue(); // recreate gradient instruction and output boolean opt = !ec.getProgram().containsFunctionProgramBlock(namespace, gradientsFunc, false); FunctionProgramBlock func = ec.getProgram().getFunctionProgramBlock(namespace, gradientsFunc, opt); ArrayList<DataIdentifier> inputs = func.getInputParams(); ArrayList<DataIdentifier> outputs = func.getOutputParams(); CPOperand[] boundInputs = inputs.stream() .map(input -> new CPOperand(input.getName(), input.getValueType(), input.getDataType())) .toArray(CPOperand[]::new); ArrayList<String> outputNames = outputs.stream().map(DataIdentifier::getName) .collect(Collectors.toCollection(ArrayList::new)); Instruction gradientsInstruction = new FunctionCallCPInstruction(namespace, gradientsFunc, opt, boundInputs, func.getInputParamNames(), outputNames, "gradient function"); DataIdentifier gradientsOutput = outputs.get(0); // recreate aggregation instruction and output if needed Instruction aggregationInstruction = null; DataIdentifier aggregationOutput = null; boolean loc= _localUpdate && _numBatchesToCompute 
> 1 | modelAvg; if(loc) { func = ec.getProgram().getFunctionProgramBlock(namespace, aggFunc, opt); inputs = func.getInputParams(); outputs = func.getOutputParams(); boundInputs = inputs.stream() .map(input -> new CPOperand(input.getName(), input.getValueType(), input.getDataType())) .toArray(CPOperand[]::new); outputNames = outputs.stream().map(DataIdentifier::getName) .collect(Collectors.toCollection(ArrayList::new)); aggregationInstruction = new FunctionCallCPInstruction(namespace, aggFunc, opt, boundInputs, func.getInputParamNames(), outputNames, "aggregation function"); aggregationOutput = outputs.get(0); } ListObject accGradients = null; int currentLocalBatchNumber = _localStartBatchNum; // prepare execution context ec.setVariable(Statement.PS_MODEL, model); for(int batchCounter = 0; batchCounter < _numBatchesToCompute; batchCounter++) { int localBatchNum = getNextLocalBatchNum(currentLocalBatchNumber++, possibleBatchesPerLocalEpoch); // slice batch from feature and label matrix long begin = localBatchNum * batchSize + 1; long end = Math.min((localBatchNum + 1) * batchSize, dataSize); MatrixObject bFeatures = ParamservUtils.sliceMatrix(features, begin, end); MatrixObject bLabels = ParamservUtils.sliceMatrix(labels, begin, end); // prepare execution context ec.setVariable(Statement.PS_FEATURES, bFeatures); ec.setVariable(Statement.PS_LABELS, bLabels); // calculate gradients for batch gradientsInstruction.processInstruction(ec); ListObject gradients = ec.getListObject(gradientsOutput.getName()); // accrue the computed gradients - In the single batch case this is just a list copy // is this equivalent for momentum based and AMS prob? accGradients = modelAvg ? 
null : ParamservUtils.accrueGradients(accGradients, gradients, false); // update the local model with gradients if needed // FIXME ensure that with modelAvg we always update the model // (current fails due to missing aggregation instruction) if(loc && aggregationInstruction != null && aggregationOutput != null) { // Invoke the aggregate function aggregationInstruction.processInstruction(ec); // Get the new model model = ec.getListObject(aggregationOutput.getName()); // Set new model in execution context ec.setVariable(Statement.PS_MODEL, model); // clean up gradients and result ParamservUtils.cleanupListObject(ec, aggregationOutput.getName()); } // clean up ParamservUtils.cleanupData(ec, Statement.PS_FEATURES); ParamservUtils.cleanupData(ec, Statement.PS_LABELS); } // model clean up ParamservUtils.cleanupListObject(ec, ec.getVariable(Statement.PS_FED_MODEL_VARID).toString()); // TODO double check cleanup gradients and models // stop timing DoubleObject gradientsTime = new DoubleObject(tGradients.stop()); ParamServStatistics.accGradientComputeTime(gradientsTime.getLongValue()); return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, new Object[]{modelAvg ? model : accGradients, gradientsTime}); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } /** * This wraps federatedComputeGradientsForNBatches and adds encryption */ private static class HEComputeGradientsForNBatches extends federatedComputeGradientsForNBatches { private static final long serialVersionUID = -3535901512348794852L; private final long[] _deferredIds; protected HEComputeGradientsForNBatches(long[] deferredIds, long[] inIDs, int numBatchesToCompute, boolean localUpdate, int localStartBatchNum) { super(inIDs, numBatchesToCompute, localUpdate, localStartBatchNum); _deferredIds = deferredIds; } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data_without_deferred) { Timing tTotal = new Timing(true); // add features and gradients to data Data[] deferred_inputs = Arrays.stream(_deferredIds).mapToObj(id -> ec.getVariable(String.valueOf(id))).toArray(Data[]::new); Data[] data = Arrays.copyOf(deferred_inputs, deferred_inputs.length + data_without_deferred.length); System.arraycopy(data_without_deferred, 0, data, deferred_inputs.length, data_without_deferred.length); FederatedResponse res = super.execute(ec, data); if (!res.isSuccessful()) { return res; } // encrypt model with SEAL try { Timing tEncrypt = DMLScript.STATISTICS ? new Timing(true) : null; ListObject model = (ListObject) res.getData()[0]; ListObject encrypted_model = new ListObject(model); IntStream.range(0, model.getLength()).forEach(matrix_idx -> { CiphertextMatrix encrypted_matrix = ec.getSealClient().encrypt((MatrixObject) model.getData(matrix_idx)); encrypted_model.set(matrix_idx, encrypted_matrix); }); // overwrite model with encryption res.getData()[0] = encrypted_model; if (tEncrypt != null) { ParamServStatistics.accHEEncryptionTime((long)tEncrypt.stop()); } // stop timing DoubleObject gradientsTime = new DoubleObject(tTotal.stop()); res.getData()[1] = gradientsTime; } catch (Exception e) { return new FederatedResponse(FederatedResponse.ResponseType.ERROR, new Object[] { e }); } return res; } } private static class HEComputePartialDecryption extends FederatedUDF { private static final long serialVersionUID = -4535098129348794852L; private final CiphertextMatrix[] _encrypted_sum; protected HEComputePartialDecryption(CiphertextMatrix[] encrypted_sum) { super(new long[]{}); _encrypted_sum = encrypted_sum; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { Timing tPartialDecrypt = DMLScript.STATISTICS ? 
new Timing(true) : null; PlaintextMatrix[] result = new PlaintextMatrix[_encrypted_sum.length]; IntStream.range(0, result.length).forEach(i -> { result[i] = ec.getSealClient().partiallyDecrypt(_encrypted_sum[i]); }); if (tPartialDecrypt != null) { ParamServStatistics.accHEPartialDecryptionTime((long)tPartialDecrypt.stop()); } return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, result); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public PlaintextMatrix[] getPartialDecryption(CiphertextMatrix[] encrypted_sum) { Object udf = new HEComputePartialDecryption(encrypted_sum); Future<FederatedResponse> udfResponse = _featuresData.executeFederatedOperation( new FederatedRequest(RequestType.EXEC_UDF, _featuresData.getVarID(), udf)); try { Object[] responseData = udfResponse.get().getData(); return (PlaintextMatrix[]) responseData; } catch(Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute UDF" + e.getMessage()); } } // Statistics methods protected void accFedPSGradientWeightingTime(Timing time) { if (DMLScript.STATISTICS && time != null) ParamServStatistics.accFedGradientWeightingTime((long) time.stop()); } @Override public String getWorkerName() { return String.format("Federated worker_%d", _workerID); } @Override protected void incWorkerNumber() { if (DMLScript.STATISTICS) ParamServStatistics.incWorkerNumber(); } @Override protected void accLocalModelUpdateTime(Timing time) { throw new NotImplementedException(); } @Override protected void accBatchIndexingTime(Timing time) { throw new NotImplementedException(); } @Override protected void accGradientComputeTime(Timing time) { throw new NotImplementedException(); } public PublicKey getPartialPublicKey() { return _partial_public_key; } public void setPublicKey(PublicKey public_key) { Future<FederatedResponse> res = _featuresData.executeFederatedOperation( new FederatedRequest(RequestType.EXEC_UDF, 
_featuresData.getVarID(), new SetPublicKeyFederatedWorker(public_key))); try { FederatedResponse response = res.get(); if(!response.isSuccessful()) throw new DMLRuntimeException("FederatedLocalPSThread: SetPublicKey UDF failed"); } catch(Exception e) { throw new DMLRuntimeException("FederatedLocalPSThread: failed to execute Public Key Setup UDF" + e.getMessage()); } } }
googleapis/google-cloud-java
35,450
java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/ListControlsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/retail/v2alpha/control_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.retail.v2alpha; /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.ListControlsResponse} */ public final class ListControlsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.ListControlsResponse) ListControlsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListControlsResponse.newBuilder() to construct. 
private ListControlsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListControlsResponse() { controls_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListControlsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.ControlServiceProto .internal_static_google_cloud_retail_v2alpha_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.ControlServiceProto .internal_static_google_cloud_retail_v2alpha_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.ListControlsResponse.class, com.google.cloud.retail.v2alpha.ListControlsResponse.Builder.class); } public static final int CONTROLS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.retail.v2alpha.Control> controls_; /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.retail.v2alpha.Control> getControlsList() { return controls_; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.retail.v2alpha.ControlOrBuilder> getControlsOrBuilderList() { return controls_; } /** * * * <pre> * All the Controls for a given catalog. 
* </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ @java.lang.Override public int getControlsCount() { return controls_.size(); } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.retail.v2alpha.Control getControls(int index) { return controls_.get(index); } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.retail.v2alpha.ControlOrBuilder getControlsOrBuilder(int index) { return controls_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < controls_.size(); i++) { output.writeMessage(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < controls_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.retail.v2alpha.ListControlsResponse)) { return super.equals(obj); } com.google.cloud.retail.v2alpha.ListControlsResponse other = (com.google.cloud.retail.v2alpha.ListControlsResponse) obj; if (!getControlsList().equals(other.getControlsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getControlsCount() > 0) { hash = (37 * hash) + CONTROLS_FIELD_NUMBER; hash = (53 * hash) + getControlsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.retail.v2alpha.ListControlsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.ListControlsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.ListControlsResponse) com.google.cloud.retail.v2alpha.ListControlsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.ControlServiceProto .internal_static_google_cloud_retail_v2alpha_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.ControlServiceProto .internal_static_google_cloud_retail_v2alpha_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.ListControlsResponse.class, com.google.cloud.retail.v2alpha.ListControlsResponse.Builder.class); } // Construct using com.google.cloud.retail.v2alpha.ListControlsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); } else { controls_ = null; controlsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.retail.v2alpha.ControlServiceProto 
.internal_static_google_cloud_retail_v2alpha_ListControlsResponse_descriptor; } @java.lang.Override public com.google.cloud.retail.v2alpha.ListControlsResponse getDefaultInstanceForType() { return com.google.cloud.retail.v2alpha.ListControlsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.retail.v2alpha.ListControlsResponse build() { com.google.cloud.retail.v2alpha.ListControlsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.retail.v2alpha.ListControlsResponse buildPartial() { com.google.cloud.retail.v2alpha.ListControlsResponse result = new com.google.cloud.retail.v2alpha.ListControlsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.retail.v2alpha.ListControlsResponse result) { if (controlsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { controls_ = java.util.Collections.unmodifiableList(controls_); bitField0_ = (bitField0_ & ~0x00000001); } result.controls_ = controls_; } else { result.controls_ = controlsBuilder_.build(); } } private void buildPartial0(com.google.cloud.retail.v2alpha.ListControlsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.retail.v2alpha.ListControlsResponse) { return mergeFrom((com.google.cloud.retail.v2alpha.ListControlsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.retail.v2alpha.ListControlsResponse other) { if (other == com.google.cloud.retail.v2alpha.ListControlsResponse.getDefaultInstance()) return this; if (controlsBuilder_ == null) { if (!other.controls_.isEmpty()) { if (controls_.isEmpty()) { controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureControlsIsMutable(); controls_.addAll(other.controls_); } onChanged(); } } else { if (!other.controls_.isEmpty()) { if (controlsBuilder_.isEmpty()) { controlsBuilder_.dispose(); controlsBuilder_ = null; controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); controlsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getControlsFieldBuilder() : null; } else { controlsBuilder_.addAllMessages(other.controls_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.retail.v2alpha.Control m = input.readMessage( com.google.cloud.retail.v2alpha.Control.parser(), extensionRegistry); if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(m); } else { controlsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.retail.v2alpha.Control> controls_ = java.util.Collections.emptyList(); private void ensureControlsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { controls_ = new java.util.ArrayList<com.google.cloud.retail.v2alpha.Control>(controls_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.Control, com.google.cloud.retail.v2alpha.Control.Builder, com.google.cloud.retail.v2alpha.ControlOrBuilder> controlsBuilder_; 
/** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public java.util.List<com.google.cloud.retail.v2alpha.Control> getControlsList() { if (controlsBuilder_ == null) { return java.util.Collections.unmodifiableList(controls_); } else { return controlsBuilder_.getMessageList(); } } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public int getControlsCount() { if (controlsBuilder_ == null) { return controls_.size(); } else { return controlsBuilder_.getCount(); } } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public com.google.cloud.retail.v2alpha.Control getControls(int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessage(index); } } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder setControls(int index, com.google.cloud.retail.v2alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.set(index, value); onChanged(); } else { controlsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder setControls( int index, com.google.cloud.retail.v2alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.set(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given catalog. 
* </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder addControls(com.google.cloud.retail.v2alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(value); onChanged(); } else { controlsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder addControls(int index, com.google.cloud.retail.v2alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(index, value); onChanged(); } else { controlsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder addControls(com.google.cloud.retail.v2alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder addControls( int index, com.google.cloud.retail.v2alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder addAllControls( java.lang.Iterable<? 
extends com.google.cloud.retail.v2alpha.Control> values) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, controls_); onChanged(); } else { controlsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder clearControls() { if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { controlsBuilder_.clear(); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public Builder removeControls(int index) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.remove(index); onChanged(); } else { controlsBuilder_.remove(index); } return this; } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public com.google.cloud.retail.v2alpha.Control.Builder getControlsBuilder(int index) { return getControlsFieldBuilder().getBuilder(index); } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public com.google.cloud.retail.v2alpha.ControlOrBuilder getControlsOrBuilder(int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public java.util.List<? 
extends com.google.cloud.retail.v2alpha.ControlOrBuilder> getControlsOrBuilderList() { if (controlsBuilder_ != null) { return controlsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(controls_); } } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public com.google.cloud.retail.v2alpha.Control.Builder addControlsBuilder() { return getControlsFieldBuilder() .addBuilder(com.google.cloud.retail.v2alpha.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public com.google.cloud.retail.v2alpha.Control.Builder addControlsBuilder(int index) { return getControlsFieldBuilder() .addBuilder(index, com.google.cloud.retail.v2alpha.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given catalog. * </pre> * * <code>repeated .google.cloud.retail.v2alpha.Control controls = 1;</code> */ public java.util.List<com.google.cloud.retail.v2alpha.Control.Builder> getControlsBuilderList() { return getControlsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.Control, com.google.cloud.retail.v2alpha.Control.Builder, com.google.cloud.retail.v2alpha.ControlOrBuilder> getControlsFieldBuilder() { if (controlsBuilder_ == null) { controlsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.Control, com.google.cloud.retail.v2alpha.Control.Builder, com.google.cloud.retail.v2alpha.ControlOrBuilder>( controls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); controls_ = null; } return controlsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.ListControlsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.ListControlsResponse) private static final com.google.cloud.retail.v2alpha.ListControlsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.ListControlsResponse(); } public static com.google.cloud.retail.v2alpha.ListControlsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListControlsResponse> PARSER = new com.google.protobuf.AbstractParser<ListControlsResponse>() { @java.lang.Override public ListControlsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListControlsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListControlsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.retail.v2alpha.ListControlsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
hibernate/query-validator
35,656
src/main/java/org/hibernate/query/validator/ProcessorSessionFactory.java
package org.hibernate.query.validator; import org.hibernate.PropertyNotFoundException; import org.hibernate.engine.spi.Mapping; import org.hibernate.type.BasicType; import org.hibernate.type.CollectionType; import org.hibernate.type.CompositeType; import org.hibernate.type.EntityType; import org.hibernate.type.ManyToOneType; import org.hibernate.type.Type; import org.hibernate.type.descriptor.java.EnumJavaType; import org.hibernate.type.descriptor.jdbc.IntegerJdbcType; import org.hibernate.type.descriptor.jdbc.JdbcType; import org.hibernate.type.descriptor.jdbc.VarcharJdbcType; import org.hibernate.type.internal.BasicTypeImpl; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.ModuleElement; import javax.lang.model.element.Name; import javax.lang.model.element.NestingKind; import javax.lang.model.element.PackageElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.type.TypeVariable; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; import jakarta.persistence.AccessType; import java.beans.Introspector; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static java.util.Arrays.stream; import static org.hibernate.internal.util.StringHelper.qualify; import static org.hibernate.internal.util.StringHelper.root; import static org.hibernate.internal.util.StringHelper.split; import static org.hibernate.internal.util.StringHelper.unroot; /** * @author Gavin King */ public abstract class 
ProcessorSessionFactory extends MockSessionFactory { static final Mocker<ProcessorSessionFactory> instance = Mocker.variadic(ProcessorSessionFactory.class); private static final Mocker<Component> component = Mocker.variadic(Component.class); private static final Mocker<ToManyAssociationPersister> toManyPersister = Mocker.variadic(ToManyAssociationPersister.class); private static final Mocker<ElementCollectionPersister> collectionPersister = Mocker.variadic(ElementCollectionPersister.class); private static final Mocker<EntityPersister> entityPersister = Mocker.variadic(EntityPersister.class); private static final CharSequence jakartaPersistence = new StringBuilder("jakarta").append('.').append("persistence"); private static final CharSequence javaxPersistence = new StringBuilder("javax").append('.').append("persistence"); private final Elements elementUtil; private final Types typeUtil; public ProcessorSessionFactory(ProcessingEnvironment processingEnv) { elementUtil = processingEnv.getElementUtils(); typeUtil = processingEnv.getTypeUtils(); } @Override MockEntityPersister createMockEntityPersister(String entityName) { TypeElement type = findEntityClass(entityName); return type == null ? 
null : entityPersister.make(entityName, type, this); } @Override MockCollectionPersister createMockCollectionPersister(String role) { String entityName = root(role); //only works because entity names don't contain dots String propertyPath = unroot(role); TypeElement entityClass = findEntityClass(entityName); AccessType defaultAccessType = getDefaultAccessType(entityClass); Element property = findPropertyByPath(entityClass, propertyPath, defaultAccessType); CollectionType collectionType = collectionType(memberType(property), role); if (isToManyAssociation(property)) { return toManyPersister.make(role, collectionType, getToManyTargetEntityName(property), this); } else if (isElementCollectionProperty(property)) { Element elementType = asElement(getElementCollectionElementType(property)); return collectionPersister.make(role, collectionType, elementType, propertyPath, defaultAccessType, this); } else { return null; } } @Override Type propertyType(String typeName, String propertyPath) { TypeElement type = findClassByQualifiedName(typeName); AccessType accessType = getAccessType(type, AccessType.FIELD); Element propertyByPath = findPropertyByPath(type, propertyPath, accessType); return propertyByPath == null ? null : propertyType(propertyByPath, typeName, propertyPath, accessType); } private static Element findPropertyByPath(TypeElement type, String propertyPath, AccessType defaultAccessType) { return stream(split(".", propertyPath)) .reduce((Element) type, (symbol, segment) -> dereference( defaultAccessType, symbol, segment ), (last, current) -> current); } private static Element dereference(AccessType defaultAccessType, Element symbol, String segment) { if (symbol == null) { return null; } else { Element element = asElement(symbol.asType()); return element instanceof TypeElement ? 
findProperty((TypeElement) element, segment, defaultAccessType) : null; } } static Type propertyType(Element member, String entityName, String path, AccessType defaultAccessType) { TypeMirror memberType = memberType(member); if (isEmbeddedProperty(member)) { return component.make(asElement(memberType), entityName, path, defaultAccessType); } else if (isToOneAssociation(member)) { String targetEntity = getToOneTargetEntity(member); return new ManyToOneType(typeConfiguration, targetEntity); } else if (isToManyAssociation(member)) { return collectionType(memberType, qualify(entityName, path)); } else if (isElementCollectionProperty(member)) { return collectionType(memberType, qualify(entityName, path)); } else if (isEnumProperty(member)) { return new BasicTypeImpl(new EnumJavaType(Enum.class), enumJdbcType(member)); } else { return typeConfiguration.getBasicTypeRegistry() .getRegisteredType(qualifiedName(memberType)); } } private static JdbcType enumJdbcType(Element member) { VariableElement mapping = (VariableElement) getAnnotationMember(getAnnotation(member,"Enumerated"), "value"); return mapping != null && mapping.getSimpleName().contentEquals("STRING") ? 
VarcharJdbcType.INSTANCE : IntegerJdbcType.INSTANCE; } private static Type elementCollectionElementType(TypeElement elementType, String role, String path, AccessType defaultAccessType) { if (isEmbeddableType(elementType)) { return component.make(elementType, role, path, defaultAccessType); } else { return typeConfiguration.getBasicTypeRegistry() .getRegisteredType(qualifiedName(elementType.asType())); } } private static CollectionType collectionType(TypeMirror type, String role) { return createCollectionType(role, simpleName(type)); } public static abstract class Component implements CompositeType { private final String[] propertyNames; private final Type[] propertyTypes; TypeElement type; public Component(TypeElement type, String entityName, String path, AccessType defaultAccessType) { this.type = type; List<String> names = new ArrayList<>(); List<Type> types = new ArrayList<>(); while (type!=null) { if (isMappedClass(type)) { //ignore unmapped intervening classes AccessType accessType = getAccessType(type, defaultAccessType); for (Element member: type.getEnclosedElements()) { if (isPersistable(member, accessType)) { String name = propertyName(member); Type propertyType = propertyType(member, entityName, qualify(path, name), defaultAccessType); if (propertyType != null) { names.add(name); types.add(propertyType); } } } } type = (TypeElement) asElement(type.getSuperclass()); } propertyNames = names.toArray(new String[0]); propertyTypes = types.toArray(new Type[0]); } @Override public int getPropertyIndex(String name) { String[] names = getPropertyNames(); for ( int i = 0, max = names.length; i < max; i++ ) { if ( names[i].equals( name ) ) { return i; } } throw new PropertyNotFoundException( "Could not resolve attribute '" + name + "' of '" + getName() + "'" ); } @Override public String getName() { return type.getSimpleName().toString(); } @Override public boolean isComponentType() { return true; } @Override public String[] getPropertyNames() { return propertyNames; 
} @Override public Type[] getSubtypes() { return propertyTypes; } @Override public boolean[] getPropertyNullability() { return new boolean[propertyNames.length]; } @Override public int getColumnSpan(Mapping mapping) { return propertyNames.length; } } public static abstract class EntityPersister extends MockEntityPersister { private final TypeElement type; private final javax.lang.model.util.Types typeUtil; public EntityPersister(String entityName, TypeElement type, ProcessorSessionFactory that) { super(entityName, getDefaultAccessType(type), that); this.type = type; this.typeUtil = that.typeUtil; initSubclassPersisters(); } @Override boolean isSubclassPersister(MockEntityPersister entityPersister) { EntityPersister persister = (EntityPersister) entityPersister; return typeUtil.isSubtype( persister.type.asType(), type.asType() ); } @Override Type createPropertyType(String propertyPath) { Element symbol = findPropertyByPath(type, propertyPath, defaultAccessType); return symbol == null ? null : propertyType(symbol, getEntityName(), propertyPath, defaultAccessType); } } public abstract static class ToManyAssociationPersister extends MockCollectionPersister { public ToManyAssociationPersister(String role, CollectionType collectionType, String targetEntityName, ProcessorSessionFactory that) { super(role, collectionType, new ManyToOneType(typeConfiguration, targetEntityName), that); } @Override Type getElementPropertyType(String propertyPath) { return getElementPersister().getPropertyType(propertyPath); } } public abstract static class ElementCollectionPersister extends MockCollectionPersister { private final TypeElement elementType; private final AccessType defaultAccessType; public ElementCollectionPersister(String role, CollectionType collectionType, TypeElement elementType, String propertyPath, AccessType defaultAccessType, ProcessorSessionFactory that) { super(role, collectionType, elementCollectionElementType(elementType, role, propertyPath, defaultAccessType), 
that); this.elementType = elementType; this.defaultAccessType = defaultAccessType; } @Override Type getElementPropertyType(String propertyPath) { Element symbol = findPropertyByPath(elementType, propertyPath, defaultAccessType); return symbol == null ? null : propertyType(symbol, getOwnerEntityName(), propertyPath, defaultAccessType); } } @Override boolean isEntityDefined(String entityName) { return findEntityClass(entityName) != null; } @Override String qualifyName(String entityName) { TypeElement entityClass = findEntityClass(entityName); return entityClass == null ? null : entityClass.getSimpleName().toString(); } @Override boolean isAttributeDefined(String entityName, String fieldName) { TypeElement entityClass = findEntityClass(entityName); return entityClass != null && findPropertyByPath(entityClass, fieldName, getDefaultAccessType(entityClass)) != null; } private TypeElement findEntityClass(String entityName) { if (entityName == null) { return null; } else if (entityName.indexOf('.')>0) { return findEntityByQualifiedName(entityName); } else { return findEntityByUnqualifiedName(entityName); } } private TypeElement findEntityByQualifiedName(String entityName) { TypeElement type = findClassByQualifiedName(entityName); return type != null && isEntity(type) ? 
type : null;
}

//Needed only for ECJ
private final Map<String,TypeElement> entityCache = new HashMap<>();

/**
 * Resolves an unqualified entity name by scanning the unnamed module first,
 * then every other module. Successful lookups are memoized in
 * {@code entityCache}; misses are not cached and rescan every time.
 */
private TypeElement findEntityByUnqualifiedName(String entityName) {
    TypeElement cached = entityCache.get(entityName);
    if ( cached != null ) {
        return cached;
    }
    TypeElement symbol = findEntityByUnqualifiedName(entityName, elementUtil.getModuleElement(""));
    if (symbol!=null) {
        entityCache.put(entityName, symbol);
        return symbol;
    }
    for (ModuleElement module: elementUtil.getAllModuleElements()) {
        symbol = findEntityByUnqualifiedName(entityName, module);
        if (symbol!=null) {
            entityCache.put(entityName, symbol);
            return symbol;
        }
    }
    return null;
}

// Scans every package of the given module for a class whose entity name matches.
private static TypeElement findEntityByUnqualifiedName(String entityName, ModuleElement module) {
    for (Element element: module.getEnclosedElements()) {
        if (element.getKind() == ElementKind.PACKAGE) {
            PackageElement pack = (PackageElement) element;
            try {
                for (Element member : pack.getEnclosedElements()) {
                    if (isMatchingEntity(member, entityName)) {
                        return (TypeElement) member;
                    }
                }
            }
            catch (Exception e) {}
            // NOTE(review): exception swallowed deliberately — best-effort scan that
            // skips packages the compiler cannot enumerate (presumably an ECJ quirk,
            // cf. the "Needed only for ECJ" cache above) — confirm.
        }
    }
    return null;
}

// A matching entity is a class mapped @Entity whose entity name equals the target.
private static boolean isMatchingEntity(Element symbol, String entityName) {
    if (symbol.getKind() == ElementKind.CLASS) {
        TypeElement type = (TypeElement) symbol;
        return isEntity(type) && getEntityName(type).equals(entityName);
    }
    else {
        return false;
    }
}

/**
 * Finds a persistent property with the given name on the type or any mapped
 * superclass, honoring each class's own {@code @Access} override.
 */
private static Element findProperty(TypeElement type, String propertyName, AccessType defaultAccessType) {
    //iterate up the superclass hierarchy
    while (type!=null) {
        if (isMappedClass(type)) { //ignore unmapped intervening classes
            AccessType accessType = getAccessType(type, defaultAccessType);
            for (Element member: type.getEnclosedElements()) {
                if (isMatchingProperty(member, propertyName, accessType)) {
                    return member;
                }
            }
        }
        type = (TypeElement) asElement(type.getSuperclass());
    }
    return null;
}

// A member matches when it is persistable under the access type and its
// inferred property name equals the requested one.
private static boolean isMatchingProperty(Element symbol, String propertyName, AccessType accessType) {
    return isPersistable(symbol, accessType) &&
propertyName.equals(propertyName(symbol));
}

/**
 * Is the given executable a JavaBeans-style getter: no parameters, and either
 * a non-void {@code getXxx()} or a boolean {@code isXxx()}?
 *
 * <p>Fix: the original compared prefixes with {@code methodName.subSequence(0,3)}
 * and {@code subSequence(0,2)} unconditionally, which throws
 * {@code StringIndexOutOfBoundsException} for any no-argument method whose name
 * is shorter than the prefix (e.g. {@code of()}, {@code id()}).
 * {@link String#startsWith(String)} performs the same comparison safely and
 * returns the same result for all names long enough to have been legal before.
 */
private static boolean isGetterMethod(ExecutableElement method) {
    if (!method.getParameters().isEmpty()) {
        // Getters take no arguments.
        return false;
    }
    else {
        String methodName = method.getSimpleName().toString();
        TypeMirror returnType = method.getReturnType();
        return methodName.startsWith("get") && returnType.getKind() != TypeKind.VOID
            || methodName.startsWith("is") && returnType.getKind() == TypeKind.BOOLEAN;
    }
}

// True when the mirrored type is a declared type carrying the named persistence annotation.
private static boolean hasAnnotation(TypeMirror type, String annotationName) {
    return type.getKind() == TypeKind.DECLARED
        && getAnnotation(((DeclaredType) type).asElement(), annotationName)!=null;
}

// True when the element carries the named persistence annotation.
private static boolean hasAnnotation(Element member, String annotationName) {
    return getAnnotation(member, annotationName)!=null;
}

/**
 * Finds the annotation mirror with the given simple name, accepting only
 * top-level annotation types from the Jakarta or legacy javax persistence
 * packages (the package-name constants are declared elsewhere in this file).
 * Returns null when no such annotation is present.
 */
private static AnnotationMirror getAnnotation(Element member, String annotationName) {
    for (AnnotationMirror mirror : member.getAnnotationMirrors()) {
        TypeElement annotationType = (TypeElement) mirror.getAnnotationType().asElement();
        if ( annotationType.getSimpleName().contentEquals(annotationName)
                && annotationType.getNestingKind() == NestingKind.TOP_LEVEL ) {
            PackageElement pack = (PackageElement) annotationType.getEnclosingElement();
            Name packageName = pack.getQualifiedName();
            if (packageName.contentEquals(jakartaPersistence)
                    || packageName.contentEquals(javaxPersistence)) {
                return mirror;
            }
        }
    }
    return null;
}

// Reads an explicitly-set member (e.g. "value", "targetEntity") from an
// annotation mirror; null-safe, and null when the member was left defaulted.
private static Object getAnnotationMember(AnnotationMirror annotation, String memberName) {
    if ( annotation == null ) {
        return null;
    }
    for (Map.Entry<? extends ExecutableElement, ?
extends AnnotationValue> entry :
            annotation.getElementValues().entrySet()) {
        if (entry.getKey().getSimpleName().contentEquals(memberName)) {
            return entry.getValue().getValue();
        }
    }
    return null;
}

// Mapped classes contribute persistent state: entities, embeddables, mapped superclasses.
private static boolean isMappedClass(TypeElement type) {
    return hasAnnotation(type, "Entity")
        || hasAnnotation(type, "Embeddable")
        || hasAnnotation(type, "MappedSuperclass");
}

// Only classes (not interfaces/enums/records) annotated @Entity count.
private static boolean isEntity(TypeElement member) {
    return member.getKind() == ElementKind.CLASS
//			&& member.getAnnotation(entityAnnotation)!=null;
        && hasAnnotation(member, "Entity");
}

private static boolean isId(Element member) {
    return hasAnnotation(member, "Id");
}

private static boolean isStatic(Element member) {
    return member.getModifiers().contains(Modifier.STATIC);
}

// Transient either via @Transient or the Java 'transient' modifier.
private static boolean isTransient(Element member) {
    return hasAnnotation(member, "Transient")
        || member.getModifiers().contains(Modifier.TRANSIENT);
}

/**
 * An enum property is either annotated {@code @Enumerated} or has a declared
 * type whose direct superclass is {@code java.lang.Enum}.
 */
private static boolean isEnumProperty(Element member) {
    if (hasAnnotation(member, "Enumerated")) {
        return true;
    }
    else {
        TypeMirror type = member.asType();
        if (type.getKind() == TypeKind.DECLARED) {
            DeclaredType declaredType = (DeclaredType) type;
            TypeElement typeElement = (TypeElement) declaredType.asElement();
            //TODO: something better here!
            return typeElement.getSuperclass().toString().startsWith("java.lang.Enum");
        }
        else {
            return false;
        }
    }
}

private static boolean isEmbeddableType(TypeElement type) {
    return hasAnnotation(type, "Embeddable");
}

// Embedded either explicitly (@Embedded) or implicitly (type is @Embeddable).
private static boolean isEmbeddedProperty(Element member) {
    if (hasAnnotation(member, "Embedded")) {
        return true;
    }
    else {
        TypeMirror type = member.asType();
        return type.getKind() == TypeKind.DECLARED
            && hasAnnotation(type, "Embeddable");
    }
}

private static boolean isElementCollectionProperty(Element member) {
    return hasAnnotation(member, "ElementCollection");
}

private static boolean isToOneAssociation(Element member) {
    return hasAnnotation(member, "ManyToOne")
        || hasAnnotation(member, "OneToOne");
}

private static boolean isToManyAssociation(Element member) {
    return hasAnnotation(member, "ManyToMany")
        || hasAnnotation(member, "OneToMany");
}

// Returns the @ManyToOne or @OneToOne mirror (in that precedence), or null.
private static AnnotationMirror toOneAnnotation(Element member) {
    AnnotationMirror manyToOne = getAnnotation(member, "ManyToOne");
    if (manyToOne!=null) return manyToOne;
    AnnotationMirror oneToOne = getAnnotation(member, "OneToOne");
    if (oneToOne!=null) return oneToOne;
    return null;
}

// Returns the @ManyToMany or @OneToMany mirror (in that precedence), or null.
private static AnnotationMirror toManyAnnotation(Element member) {
    AnnotationMirror manyToMany = getAnnotation(member, "ManyToMany");
    if (manyToMany!=null) return manyToMany;
    AnnotationMirror oneToMany = getAnnotation(member, "OneToMany");
    if (oneToMany!=null) return oneToMany;
    return null;
}

// Simple name for declared types; otherwise the mirror's own string form
// (primitives, arrays, type variables, ...).
private static String simpleName(TypeMirror type) {
    return type.getKind() == TypeKind.DECLARED ?
            simpleName(asElement(type)) :
            type.toString();
}

private static String qualifiedName(TypeMirror type) {
    return type.getKind() == TypeKind.DECLARED ?
qualifiedName(asElement(type)) :
            type.toString();
}

private static String simpleName(Element type) {
    return type.getSimpleName().toString();
}

// Qualified name built from the element model; for elements that are neither
// packages nor types, recurse into the enclosing element.
private static String qualifiedName(Element type) {
    if ( type instanceof PackageElement ) {
        return ((PackageElement) type).getQualifiedName().toString();
    }
    else if ( type instanceof TypeElement ) {
        return ((TypeElement) type).getQualifiedName().toString();
    }
    else {
        Element enclosingElement = type.getEnclosingElement();
        return enclosingElement != null ?
                qualifiedName(enclosingElement) + '.' + simpleName(type) :
                simpleName(type);
    }
}

/**
 * Access type for a class: an explicit {@code @Access(PROPERTY|FIELD)} wins,
 * otherwise the supplied default applies.
 */
private static AccessType getAccessType(TypeElement type, AccessType defaultAccessType) {
    AnnotationMirror annotation = getAnnotation(type, "Access");
    if (annotation==null) {
        return defaultAccessType;
    }
    else {
        VariableElement member = (VariableElement) getAnnotationMember(annotation, "value");
        if (member==null) {
            return defaultAccessType; //does not occur
        }
        switch (member.getSimpleName().toString()) {
            case "PROPERTY":
                return AccessType.PROPERTY;
            case "FIELD":
                return AccessType.FIELD;
            default:
                throw new IllegalStateException();
        }
    }
}

/**
 * The JPA entity name: {@code @Entity(name=...)} if set, otherwise the simple
 * class name. Null for non-entities and null input.
 */
private static String getEntityName(TypeElement type) {
    if ( type == null ) {
        return null;
    }
    AnnotationMirror entityAnnotation = getAnnotation(type, "Entity");
    if (entityAnnotation==null) {
        //not an entity!
        return null;
    }
    else {
        String name = (String) getAnnotationMember(entityAnnotation, "name");
        //entity names are unqualified class names
        return name==null ? simpleName(type) : name;
    }
}

// Element type of a collection-typed member: the LAST type argument (so Map
// values, not keys). NOTE(review): assumes a parameterized collection type —
// a raw List/Map would make typeArguments empty and this would throw
// IndexOutOfBoundsException; confirm raw types are rejected upstream.
private TypeMirror getCollectionElementType(Element property) {
    DeclaredType declaredType = (DeclaredType) memberType(property);
    List<? extends TypeMirror> typeArguments = declaredType.getTypeArguments();
    TypeMirror elementType = typeArguments.get(typeArguments.size()-1);
    return elementType==null ?
elementUtil.getTypeElement("java.lang.Object").asType() :
            elementType;
}

/**
 * Entity name targeted by a to-one association: an explicit
 * {@code targetEntity} wins; otherwise the member's own type. Null when the
 * target does not resolve to a class.
 */
private static String getToOneTargetEntity(Element property) {
    AnnotationMirror annotation = toOneAnnotation(property);
    TypeMirror classType = (TypeMirror) getAnnotationMember(annotation, "targetEntity");
    TypeMirror targetType =
            classType == null || classType.getKind() == TypeKind.VOID ?
                    memberType(property) :
                    classType;
    Element element = asElement(targetType);
    return element != null && element.getKind() == ElementKind.CLASS
            //entity names are unqualified class names
            ? getEntityName((TypeElement) element)
            : null;
}

/**
 * Entity name targeted by a to-many association: an explicit
 * {@code targetEntity} wins; otherwise the collection's element type.
 */
private String getToManyTargetEntityName(Element property) {
    AnnotationMirror annotation = toManyAnnotation(property);
    TypeMirror classType = (TypeMirror) getAnnotationMember(annotation, "targetEntity");
    TypeMirror targetType =
            classType == null || classType.getKind() == TypeKind.VOID ?
                    getCollectionElementType(property) :
                    classType;
    Element element = asElement(targetType);
    return element != null && element.getKind() == ElementKind.CLASS
            //entity names are unqualified class names
            ? getEntityName((TypeElement) element)
            : null;
}

/**
 * Element type of an {@code @ElementCollection}: an explicit
 * {@code targetClass} wins; otherwise the collection's element type argument.
 *
 * <p>Fix: the original read annotation member {@code "getElementCollectionClass"},
 * which is not a member of {@code @ElementCollection} (its members are
 * {@code targetClass} and {@code fetch}), so the lookup always returned null and
 * an explicit {@code targetClass} was silently ignored. Read {@code "targetClass"},
 * matching how the sibling methods above read {@code "targetEntity"}.
 */
private TypeMirror getElementCollectionElementType(Element property) {
    AnnotationMirror annotation = getAnnotation(property, "ElementCollection");
    TypeMirror classType = (TypeMirror) getAnnotationMember(annotation, "targetClass");
    return classType == null || classType.getKind() == TypeKind.VOID ?
getCollectionElementType(property) :
            classType;
}

// Simple name of the entity's direct superclass.
// NOTE(review): assumes the entity name resolves — findEntityClass(entityName)
// returning null would NPE here; confirm callers pre-validate via isEntityDefined.
@Override
protected String getSupertype(String entityName) {
    return asElement(findEntityClass(entityName).getSuperclass())
            .getSimpleName().toString();
}

// Subtyping between two entities, per the javax.lang.model type system.
@Override
protected boolean isSubtype(String entityName, String subtypeEntityName) {
    return typeUtil.isSubtype(
            findEntityClass(entityName).asType(),
            findEntityClass(subtypeEntityName).asType());
}

@Override
boolean isClassDefined(String qualifiedName) {
    return findClassByQualifiedName(qualifiedName)!=null;
}

// Only fields declared directly on the class are considered (no superclass walk).
@Override
boolean isFieldDefined(String qualifiedClassName, String fieldName) {
    TypeElement type = findClassByQualifiedName(qualifiedClassName);
    return type != null
        && type.getEnclosedElements().stream()
            .anyMatch(element -> element.getKind() == ElementKind.FIELD
                    && element.getSimpleName().contentEquals(fieldName));
}

/**
 * Does the class declare a constructor whose parameters are compatible with
 * the given Hibernate argument types? (Continues on the next chunk: each
 * parameter is checked primitively or via entity/basic-type subtyping.)
 */
@Override
boolean isConstructorDefined(String qualifiedClassName, List<org.hibernate.type.Type> argumentTypes) {
    TypeElement symbol = findClassByQualifiedName(qualifiedClassName);
    if (symbol==null) return false;
    for (Element cons: symbol.getEnclosedElements()) {
        if ( cons.getKind() == ElementKind.CONSTRUCTOR ) {
            ExecutableElement constructor = (ExecutableElement) cons;
            List<?
extends VariableElement> parameters = constructor.getParameters();
            if (parameters.size()==argumentTypes.size()) {
                boolean argumentsCheckOut = true;
                for (int i=0; i<argumentTypes.size(); i++) {
                    org.hibernate.type.Type type = argumentTypes.get(i);
                    VariableElement param = parameters.get(i);
                    if (param.asType().getKind().isPrimitive()) {
                        // Primitive parameter: compare primitive classes; if the
                        // Hibernate type's Java class cannot be loaded, skip the
                        // check for this argument (lenient match).
                        Class<?> primitive;
                        try {
                            primitive = toPrimitiveClass( type.getReturnedClass() );
                        }
                        catch (Exception e) {
                            continue;
                        }
                        if (!toPrimitiveClass(param).equals(primitive)) {
                            argumentsCheckOut = false;
                            break;
                        }
                    }
                    else {
                        // Reference parameter: resolve the argument's class and
                        // require it to be a subtype of the parameter type.
                        TypeElement typeClass;
                        if (type instanceof EntityType) {
                            EntityType entityType = (EntityType) type;
                            String entityName = entityType.getAssociatedEntityName();
                            typeClass = findEntityClass(entityName);
                        }
                        //TODO:
//							else if (type instanceof CompositeCustomType) {
//								typeClass = ((Component) ((CompositeCustomType) type).getUserType()).type;
//							}
                        else if (type instanceof BasicType) {
                            String className;
                            //sadly there is no way to get the classname
                            //from a Hibernate Type without trying to load
                            //the class!
                            try {
                                className = type.getReturnedClass().getName();
                            }
                            catch (Exception e) {
                                continue;
                            }
                            typeClass = findClassByQualifiedName(className);
                        }
                        else {
                            //TODO: what other Hibernate Types do we
                            //      need to consider here?
                            continue;
                        }
                        if (typeClass != null
                                && !typeUtil.isSubtype( typeClass.asType(), param.asType() ) ) {
                            argumentsCheckOut = false;
                            break;
                        }
                    }
                }
                if (argumentsCheckOut) {
                    return true; //matching constructor found!
                }
            }
        }
    }
    return false;
}

// Maps a primitive parameter's TypeKind to the corresponding primitive class;
// Object.class for non-primitive kinds.
private static Class<?> toPrimitiveClass(VariableElement param) {
    switch (param.asType().getKind()) {
        case BOOLEAN:
            return boolean.class;
        case CHAR:
            return char.class;
        case INT:
            return int.class;
        case SHORT:
            return short.class;
        case BYTE:
            return byte.class;
        case LONG:
            return long.class;
        case FLOAT:
            return float.class;
        case DOUBLE:
            return double.class;
        default:
            return Object.class;
    }
}

// Null-safe lookup of a TypeElement by fully-qualified name.
private TypeElement findClassByQualifiedName(String path) {
    return path == null ?
null :
            elementUtil.getTypeElement(path);
}

/**
 * Default access type for an entity hierarchy, inferred from where {@code @Id}
 * sits: on a getter means PROPERTY access, on a field means FIELD access.
 * Falls back to FIELD when no id is found.
 */
private static AccessType getDefaultAccessType(TypeElement type) {
    //iterate up the superclass hierarchy
    while (type!=null) {
        for (Element member: type.getEnclosedElements()) {
            if (isId(member)) {
                return member instanceof ExecutableElement ?
                        AccessType.PROPERTY :
                        AccessType.FIELD;
            }
        }
        type = (TypeElement) asElement(type.getSuperclass());
    }
    return AccessType.FIELD;
}

// JavaBeans property name: strip get/is from getters and decapitalize;
// fields keep their own name.
private static String propertyName(Element symbol) {
    String name = symbol.getSimpleName().toString();
    if (symbol.getKind() == ElementKind.METHOD) {
        if (name.startsWith("get")) {
            name = name.substring(3);
        }
        else if (name.startsWith("is")) {
            name = name.substring(2);
        }
        return Introspector.decapitalize(name);
    }
    else {
        return name;
    }
}

/**
 * Is this member persistent state under the given access type? Fields count
 * under FIELD access, getters under PROPERTY access; a member-level
 * {@code @Access} overrides either way. Static and transient members never count.
 */
private static boolean isPersistable(Element member, AccessType accessType) {
    if (isStatic(member) || isTransient(member)) {
        return false;
    }
    else if (member.getKind() == ElementKind.FIELD) {
        return accessType == AccessType.FIELD
//				|| member.getAnnotation( accessAnnotation ) != null;
            || hasAnnotation(member, "Access");
    }
    else if (member.getKind() == ElementKind.METHOD) {
        return isGetterMethod((ExecutableElement) member)
            && (accessType == AccessType.PROPERTY
//					|| member.getAnnotation( accessAnnotation ) != null);
                || hasAnnotation(member, "Access"));
    }
    else {
        return false;
    }
}

// The "type" of a member: return type for methods, declared type for fields.
private static TypeMirror memberType(Element member) {
    if (member instanceof ExecutableElement) {
        return ((ExecutableElement) member).getReturnType();
    }
    else if (member instanceof VariableElement) {
        return member.asType();
    }
    else {
        throw new IllegalArgumentException("Not a member");
    }
}

// Null-safe TypeMirror -> Element, handling declared types and type variables;
// null for every other kind (primitives, arrays, ...).
public static Element asElement(TypeMirror type) {
    if ( type == null ) {
        return null;
    }
    else {
        switch (type.getKind()) {
            case DECLARED:
                return ((DeclaredType)type).asElement();
            case TYPEVAR:
                return ((TypeVariable)type).asElement();
            default:
                return null;
        }
    }
}
}
googleapis/google-cloud-java
35,429
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/schema/predict/instance/TextExtractionPredictionInstance.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/schema/predict/instance/text_extraction.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1.schema.predict.instance; /** * * * <pre> * Prediction input format for Text Extraction. * </pre> * * Protobuf type {@code * google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance} */ public final class TextExtractionPredictionInstance extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) TextExtractionPredictionInstanceOrBuilder { private static final long serialVersionUID = 0L; // Use TextExtractionPredictionInstance.newBuilder() to construct. 
private TextExtractionPredictionInstance( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TextExtractionPredictionInstance() { content_ = ""; mimeType_ = ""; key_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TextExtractionPredictionInstance(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceProto .internal_static_google_cloud_aiplatform_v1_schema_predict_instance_TextExtractionPredictionInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceProto .internal_static_google_cloud_aiplatform_v1_schema_predict_instance_TextExtractionPredictionInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance .class, com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance .Builder.class); } public static final int CONTENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object content_ = ""; /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @return The content. */ @java.lang.Override public java.lang.String getContent() { java.lang.Object ref = content_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); content_ = s; return s; } } /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @return The bytes for content. 
*/ @java.lang.Override public com.google.protobuf.ByteString getContentBytes() { java.lang.Object ref = content_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); content_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int MIME_TYPE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object mimeType_ = ""; /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @return The mimeType. */ @java.lang.Override public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } } /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @return The bytes for mimeType. */ @java.lang.Override public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int KEY_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. 
* </pre> * * <code>string key = 3;</code> * * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); key_ = s; return s; } } /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @return The bytes for key. */ @java.lang.Override public com.google.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); key_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, content_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, mimeType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, key_); } getUnknownFields().writeTo(output); } @java.lang.Override public int 
getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, content_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, mimeType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, key_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance other = (com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) obj; if (!getContent().equals(other.getContent())) return false; if (!getMimeType().equals(other.getMimeType())) return false; if (!getKey().equals(other.getKey())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CONTENT_FIELD_NUMBER; hash = (53 * hash) + getContent().hashCode(); hash = (37 * hash) + MIME_TYPE_FIELD_NUMBER; hash = (53 * hash) + getMimeType().hashCode(); hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(java.nio.ByteBuffer data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Prediction input format for Text Extraction. * </pre> * * Protobuf type {@code * google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceProto .internal_static_google_cloud_aiplatform_v1_schema_predict_instance_TextExtractionPredictionInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceProto .internal_static_google_cloud_aiplatform_v1_schema_predict_instance_TextExtractionPredictionInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance.class, com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance.Builder.class); } // Construct using // com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; content_ = ""; mimeType_ = ""; key_ = ""; return 
this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstanceProto .internal_static_google_cloud_aiplatform_v1_schema_predict_instance_TextExtractionPredictionInstance_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance build() { com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance buildPartial() { com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance result = new com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.content_ = content_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.mimeType_ = mimeType_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.key_ = key_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) { return mergeFrom( (com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance other) { if (other == com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance .getDefaultInstance()) return this; if (!other.getContent().isEmpty()) { content_ = other.content_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getMimeType().isEmpty()) { mimeType_ = other.mimeType_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getKey().isEmpty()) { key_ = other.key_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) 
{ int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { content_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { mimeType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { key_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object content_ = ""; /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @return The content. */ public java.lang.String getContent() { java.lang.Object ref = content_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); content_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @return The bytes for content. */ public com.google.protobuf.ByteString getContentBytes() { java.lang.Object ref = content_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); content_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @param value The content to set. * @return This builder for chaining. 
*/ public Builder setContent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } content_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @return This builder for chaining. */ public Builder clearContent() { content_ = getDefaultInstance().getContent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The text snippet to make the predictions on. * </pre> * * <code>string content = 1;</code> * * @param value The bytes for content to set. * @return This builder for chaining. */ public Builder setContentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); content_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object mimeType_ = ""; /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @return The mimeType. */ public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @return The bytes for mimeType. */ public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The MIME type of the text snippet. 
The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @param value The mimeType to set. * @return This builder for chaining. */ public Builder setMimeType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } mimeType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @return This builder for chaining. */ public Builder clearMimeType() { mimeType_ = getDefaultInstance().getMimeType(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The MIME type of the text snippet. The supported MIME types are listed * below. * - text/plain * </pre> * * <code>string mime_type = 2;</code> * * @param value The bytes for mimeType to set. * @return This builder for chaining. */ public Builder setMimeTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); mimeType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object key_ = ""; /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @return The key. 
*/ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); key_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @return The bytes for key. */ public com.google.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); key_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @param value The key to set. * @return This builder for chaining. */ public Builder setKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. 
Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * This field is only used for batch prediction. If a key is provided, the * batch prediction result will by mapped to this key. If omitted, then the * batch prediction result will contain the entire input instance. Vertex AI * will not check if keys in the request are duplicates, so it is up to the * caller to ensure the keys are unique. * </pre> * * <code>string key = 3;</code> * * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); key_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance) private static final com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance(); } public static com.google.cloud.aiplatform.v1.schema.predict.instance .TextExtractionPredictionInstance 
getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TextExtractionPredictionInstance> PARSER = new com.google.protobuf.AbstractParser<TextExtractionPredictionInstance>() { @java.lang.Override public TextExtractionPredictionInstance parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TextExtractionPredictionInstance> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TextExtractionPredictionInstance> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.instance.TextExtractionPredictionInstance getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/geode
35,741
geode-core/src/test/java/org/apache/geode/internal/offheap/OffHeapRegionEntryHelperInstanceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.offheap; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.COMPRESSED_BIT; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.DESTROYED_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.END_OF_STREAM_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.INVALID_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.LOCAL_INVALID_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.NOT_AVAILABLE_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.NULL_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.REMOVED_PHASE1_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.REMOVED_PHASE2_ADDRESS; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.SERIALIZED_BIT; import static org.apache.geode.internal.offheap.OffHeapRegionEntryHelperInstance.TOMBSTONE_ADDRESS; import static 
org.apache.geode.util.internal.UncheckedUtils.uncheckedCast; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowable; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; import java.nio.ByteBuffer; import java.util.function.Function; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.apache.geode.compression.Compressor; import org.apache.geode.internal.cache.CachePerfStats; import org.apache.geode.internal.cache.CachedDeserializable; import org.apache.geode.internal.cache.DiskId; import org.apache.geode.internal.cache.EntryEventImpl; import org.apache.geode.internal.cache.RegionEntryContext; import org.apache.geode.internal.cache.Token; import org.apache.geode.internal.cache.VMCachedDeserializable; import org.apache.geode.internal.cache.entries.DiskEntry; import org.apache.geode.internal.cache.entries.OffHeapRegionEntry; import org.apache.geode.internal.cache.entries.VersionedStatsDiskRegionEntryOffHeap; import org.apache.geode.internal.offheap.MemoryAllocatorImpl.DummyNonRealTimeStatsUpdater; import org.apache.geode.internal.serialization.DSCODE; public class OffHeapRegionEntryHelperInstanceTest { private static final long VALUE_IS_NOT_ENCODABLE = 0L; private MemoryAllocator memoryAllocator; private ReferenceCounterInstance referenceCounter; private OffHeapStoredObject offHeapStoredObject; private OffHeapRegionEntryHelperInstance offHeapRegionEntryHelperInstance; @Before public void setUp() { OutOfOffHeapMemoryListener listener = mock(OutOfOffHeapMemoryListener.class); OffHeapMemoryStats stats = mock(OffHeapMemoryStats.class); Function<Long, OffHeapStoredObject> 
offHeapStoredObjectFactory = uncheckedCast(mock(Function.class)); offHeapStoredObject = mock(OffHeapStoredObject.class); referenceCounter = mock(ReferenceCounterInstance.class); when(offHeapStoredObjectFactory.apply(anyLong())) .thenReturn(offHeapStoredObject); memoryAllocator = MemoryAllocatorImpl.create(listener, stats, 1, OffHeapStorage.MIN_SLAB_SIZE, OffHeapStorage.MIN_SLAB_SIZE, null, () -> new DummyNonRealTimeStatsUpdater()); offHeapRegionEntryHelperInstance = spy(new OffHeapRegionEntryHelperInstance(ohAddress -> offHeapStoredObject, referenceCounter)); } @After public void tearDown() { MemoryAllocatorImpl.freeOffHeapMemory(); } @Test public void encodeDataAsAddressShouldReturnZeroIfValueIsGreaterThanSevenBytes() { byte[] valueInBytes = ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong(Long.MAX_VALUE).array(); assertThat(valueInBytes.length) .isGreaterThanOrEqualTo(OffHeapRegionEntryHelperInstance.MAX_LENGTH_FOR_DATA_AS_ADDRESS); long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, false, false); assertThat(encodedAddress) .isEqualTo(VALUE_IS_NOT_ENCODABLE); } @Test public void encodeDataAsAddressShouldEncodeLongIfItsSerializedAndIfItsNotTooBig() { byte[] valueInBytes = EntryEventImpl.serialize(0L); boolean isSerialized = true; boolean isCompressed = false; long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, isSerialized, isCompressed); long expectedEncodedAddress = 123L; assertThat(encodedAddress) .isEqualTo(expectedEncodedAddress); assertSerializedAndCompressedBits(encodedAddress, isSerialized, isCompressed); } @Test public void encodeDataAsAddressShouldReturnZeroIfValueIsLongAndItIsSerializedAndBig() { byte[] valueInBytes = EntryEventImpl.serialize(Long.MAX_VALUE); long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, true, false); assertThat(encodedAddress) .isEqualTo(VALUE_IS_NOT_ENCODABLE); } @Test public void 
encodeDataAsAddressShouldReturnZeroIfValueIsLargerThanEightBytesAndNotLong() { byte[] someValue = new byte[8]; someValue[0] = DSCODE.CLASS.toByte(); long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(someValue, true, false); assertThat(encodedAddress) .isEqualTo(VALUE_IS_NOT_ENCODABLE); } @Test public void encodeDataAsAddressShouldReturnValidAddressIfValueIsLesserThanSevenBytes() { byte[] valueInBytes = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); boolean isSerialized = false; boolean isCompressed = false; long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, isSerialized, isCompressed); long expectedAddress = 549755813697L; assertThat(encodedAddress) .isEqualTo(expectedAddress); assertSerializedAndCompressedBits(encodedAddress, isSerialized, isCompressed); } @Test public void encodeDataAsAddressShouldSetSerializedBitIfSerialized() { byte[] valueInBytes = EntryEventImpl.serialize(Integer.MAX_VALUE); boolean isSerialized = true; boolean isCompressed = false; long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, isSerialized, isCompressed); long expectedAddress = 63221918596947L; assertThat(expectedAddress) .isEqualTo(encodedAddress); assertSerializedAndCompressedBits(encodedAddress, isSerialized, isCompressed); } @Test public void encodeDataAsAddressShouldSetSerializedBitIfCompressed() { byte[] valueInBytes = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); boolean isSerialized = false; boolean isCompressed = true; long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, isSerialized, isCompressed); long expectedAddress = 549755813701L; assertThat(encodedAddress) .isEqualTo(expectedAddress); assertSerializedAndCompressedBits(encodedAddress, isSerialized, isCompressed); } @Test public void encodeDataAsAddressShouldSetBothSerializedAndCompressedBitsIfSerializedAndCompressed() { 
byte[] valueInBytes = EntryEventImpl.serialize(Integer.MAX_VALUE); boolean isSerialized = true; boolean isCompressed = true; long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(valueInBytes, isSerialized, isCompressed); long expectedAddress = 63221918596951L; assertThat(expectedAddress) .isEqualTo(encodedAddress); assertSerializedAndCompressedBits(encodedAddress, isSerialized, isCompressed); } @Test public void decodeUncompressedAddressToBytesShouldReturnActualBytes() { long encodedAddress = 549755813697L; int value = Integer.MAX_VALUE; byte[] actual = offHeapRegionEntryHelperInstance.decodeUncompressedAddressToBytes(encodedAddress); byte[] expectedValue = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(value).array(); assertThat(actual) .isEqualTo(expectedValue); } @Test public void decodeUncompressedAddressToBytesShouldDecodeLongIfItsSerializedAndIfItsNotTooBig() { byte[] actual = offHeapRegionEntryHelperInstance.decodeUncompressedAddressToBytes(123L); byte[] expectedValue = EntryEventImpl.serialize(0L); assertThat(actual) .isEqualTo(expectedValue); } @Test public void decodeUncompressedAddressToBytesWithCompressedAddressShouldThrowException() { long encodedAddress = 549755813703L; Throwable thrown = catchThrowable(() -> { offHeapRegionEntryHelperInstance.decodeUncompressedAddressToBytes(encodedAddress); }); assertThat(thrown) .isInstanceOf(AssertionError.class); } @Test public void decodeCompressedDataAsAddressToRawBytes() { long encodedAddress = 549755813703L; byte[] expected = new byte[] {127, -1, -1, -1}; byte[] bytes = offHeapRegionEntryHelperInstance.decodeAddressToRawBytes(encodedAddress); assertThat(bytes) .isEqualTo(expected); } @Test public void encodedAddressShouldBeDecodableEvenIfValueIsSerialized() { int value = Integer.MAX_VALUE; byte[] serializedValue = EntryEventImpl.serialize(value); long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(serializedValue, true, false); int actualValue = (int) 
offHeapRegionEntryHelperInstance.decodeAddressToObject(encodedAddress); assertThat(actualValue) .isEqualTo(value); } @Test public void encodedAddressShouldBeDecodableEvenIfValueIsUnserialized() { int value = Integer.MAX_VALUE; byte[] unSerializedValue = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(value).array(); long encodedAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(unSerializedValue, false, false); byte[] actualValue = (byte[]) offHeapRegionEntryHelperInstance.decodeAddressToObject(encodedAddress); assertThat(actualValue) .isEqualTo(unSerializedValue); } @Test public void isSerializedShouldReturnTrueIfSerialized() { assertThat(offHeapRegionEntryHelperInstance.isSerialized(1000010L)).isTrue(); } @Test public void isSerializedShouldReturnFalseIfNotSerialized() { assertThat(offHeapRegionEntryHelperInstance.isSerialized(1000000L)).isFalse(); } @Test public void isCompressedShouldReturnTrueIfCompressed() { assertThat(offHeapRegionEntryHelperInstance.isCompressed(1000100L)).isTrue(); } @Test public void isCompressedShouldReturnFalseIfNotCompressed() { assertThat(offHeapRegionEntryHelperInstance.isCompressed(1000000L)).isFalse(); } @Test public void isOffHeapShouldReturnTrueIfAddressIsOnOffHeap() { OffHeapStoredObject value = createChunk(Long.MAX_VALUE); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(value.getAddress())).isTrue(); } @Test public void isOffHeapShouldReturnFalseIfAddressIsAnEncodedAddress() { byte[] data = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, false, false); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(address)).isFalse(); } @Test public void isOffHeapShouldReturnFalseForAnyTokenAddress() { assertThat(offHeapRegionEntryHelperInstance.isOffHeap(NULL_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(INVALID_ADDRESS)).isFalse(); 
assertThat(offHeapRegionEntryHelperInstance.isOffHeap(LOCAL_INVALID_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(DESTROYED_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(REMOVED_PHASE1_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(REMOVED_PHASE2_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(END_OF_STREAM_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(NOT_AVAILABLE_ADDRESS)).isFalse(); assertThat(offHeapRegionEntryHelperInstance.isOffHeap(TOMBSTONE_ADDRESS)).isFalse(); } @Test public void setValueShouldChangeTheRegionEntryAddressToNewAddress() { // mock region entry OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); // some old address long oldAddress = 1L; // testing when the newValue is a chunk OffHeapStoredObject newValue = createChunk(Long.MAX_VALUE); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, newValue.getAddress())).thenReturn(Boolean.TRUE); // invoke the method under test offHeapRegionEntryHelperInstance.setValue(regionEntry, newValue); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, newValue.getAddress()); // resetting the spy in-order to re-use reset(regionEntry); // testing when the newValue is DataAsAddress TinyStoredObject newAddress1 = new TinyStoredObject(2L); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, newAddress1.getAddress())).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, newAddress1); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, newAddress1.getAddress()); reset(regionEntry); // Testing when newValue is Token Objects // mock region entry methods required for test 
when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, NULL_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, null); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, NULL_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, INVALID_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.INVALID); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, INVALID_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, LOCAL_INVALID_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.LOCAL_INVALID); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, LOCAL_INVALID_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, DESTROYED_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.DESTROYED); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, DESTROYED_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, REMOVED_PHASE1_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.REMOVED_PHASE1); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, REMOVED_PHASE1_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); 
when(regionEntry.setAddress(oldAddress, REMOVED_PHASE2_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.REMOVED_PHASE2); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, REMOVED_PHASE2_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, END_OF_STREAM_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.END_OF_STREAM); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, END_OF_STREAM_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, NOT_AVAILABLE_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.NOT_AVAILABLE); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, NOT_AVAILABLE_ADDRESS); reset(regionEntry); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, TOMBSTONE_ADDRESS)).thenReturn(true); offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.TOMBSTONE); // verify oldAddress is replaced with newAddress verify(regionEntry).setAddress(oldAddress, TOMBSTONE_ADDRESS); } @Test public void setValueShouldChangeTheRegionEntryAddressToNewAddressAndReleaseOldValueIfItsOnOffHeap() { OffHeapStoredObject oldValue = createChunk(Long.MAX_VALUE); OffHeapStoredObject newValue = createChunk(Long.MAX_VALUE - 1); OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); // mock Chunk static methods - in-order to verify that release is called doNothing().when(offHeapStoredObject).release(); // mock region entry methods required for test when(regionEntry.getAddress()) .thenReturn(oldValue.getAddress()); 
when(regionEntry.setAddress(oldValue.getAddress(), newValue.getAddress())) .thenReturn(Boolean.TRUE); // invoke the method under test offHeapRegionEntryHelperInstance.setValue(regionEntry, newValue); // verify oldAddress is changed to newAddress verify(regionEntry) .setAddress(oldValue.getAddress(), newValue.getAddress()); // verify oldAddress is released verify(referenceCounter) .release(oldValue.getAddress()); } @Test public void setValueShouldChangeTheRegionEntryAddressToNewAddressAndDoesNothingIfOldAddressIsAnEncodedAddress() { byte[] oldData = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); byte[] newData = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE - 1).array(); long oldAddress = offHeapRegionEntryHelperInstance.encodeDataAsAddress(oldData, false, false); StoredObject newAddress = new TinyStoredObject( offHeapRegionEntryHelperInstance.encodeDataAsAddress(newData, false, false)); OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); // mock region entry methods required for test when(regionEntry.getAddress()) .thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, newAddress.getAddress())) .thenReturn(true); // invoke the method under test offHeapRegionEntryHelperInstance.setValue(regionEntry, newAddress); // verify oldAddress is changed to newAddress verify(regionEntry) .setAddress(oldAddress, newAddress.getAddress()); // verify that release is never called as the old address is not on offheap verifyNoInteractions(offHeapStoredObject); } @Test public void setValueShouldChangeTheRegionEntryAddressToNewAddressAndDoesNothingIfOldAddressIsATokenAddress() { long oldAddress = REMOVED_PHASE1_ADDRESS; long newAddress = REMOVED_PHASE2_ADDRESS; OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()) .thenReturn(oldAddress); when(regionEntry.setAddress(oldAddress, newAddress)) .thenReturn(true); // invoke the method under test 
offHeapRegionEntryHelperInstance.setValue(regionEntry, Token.REMOVED_PHASE2); // verify oldAddress is changed to newAddress verify(regionEntry) .setAddress(oldAddress, newAddress); } @Test public void setValueShouldThrowIllegalExceptionIfNewValueCannotBeConvertedToAddress() { OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()) .thenReturn(1L); // invoke the method under test with some object other than Chunk/DataAsAddress/Token Throwable thrown = catchThrowable(() -> { offHeapRegionEntryHelperInstance.setValue(regionEntry, new Object()); }); assertThat(thrown) .isInstanceOf(IllegalStateException.class); } @Test public void getValueAsTokenShouldReturnNotATokenIfValueIsOnOffHeap() { OffHeapStoredObject chunk = createChunk(Long.MAX_VALUE); OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()) .thenReturn(chunk.getAddress()); Token token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token) .isEqualTo(Token.NOT_A_TOKEN); } @Test public void getValueAsTokenShouldReturnNotATokenIfValueIsEncoded() { byte[] data = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, false, false); OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()) .thenReturn(address); Token token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token) .isEqualTo(Token.NOT_A_TOKEN); } @Test public void getValueAsTokenShouldReturnAValidToken() { OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()).thenReturn(NULL_ADDRESS); Token token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isNull(); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(INVALID_ADDRESS); token = 
offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.INVALID); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(LOCAL_INVALID_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.LOCAL_INVALID); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(DESTROYED_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.DESTROYED); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(REMOVED_PHASE1_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.REMOVED_PHASE1); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(REMOVED_PHASE2_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.REMOVED_PHASE2); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(END_OF_STREAM_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.END_OF_STREAM); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(NOT_AVAILABLE_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.NOT_AVAILABLE); // mock region entry methods required for test when(regionEntry.getAddress()).thenReturn(TOMBSTONE_ADDRESS); token = offHeapRegionEntryHelperInstance.getValueAsToken(regionEntry); assertThat(token).isEqualTo(Token.TOMBSTONE); } @Test public void addressToObjectShouldReturnValueFromChunk() { OffHeapRegionEntryHelperInstance offHeapRegionEntryHelperInstance = new OffHeapRegionEntryHelperInstance(); OffHeapStoredObject expected = createChunk(Long.MAX_VALUE); Object actual = 
offHeapRegionEntryHelperInstance.addressToObject(expected.getAddress(), false, null); assertThat(actual) .isInstanceOf(OffHeapStoredObject.class) .isEqualTo(expected); } @Test public void addressToObjectShouldReturnCachedDeserializableFromChunkIfAskedToDecompress() { byte[] data = EntryEventImpl.serialize(Long.MAX_VALUE); RegionEntryContext regionContext = mock(RegionEntryContext.class); CachePerfStats cacheStats = mock(CachePerfStats.class); Compressor compressor = mock(Compressor.class); when(regionContext.getCompressor()) .thenReturn(compressor); when(compressor.decompress(data)) .thenReturn(data); when(regionContext.getCachePerfStats()) .thenReturn(cacheStats); when(cacheStats.startDecompression()) .thenReturn(10000L); MemoryBlock chunk = (MemoryBlock) memoryAllocator.allocateAndInitialize(data, true, true); offHeapRegionEntryHelperInstance = spy(new OffHeapRegionEntryHelperInstance(OffHeapStoredObject::new, referenceCounter)); Object actual = offHeapRegionEntryHelperInstance.addressToObject(chunk.getAddress(), true, regionContext); assertThat(actual) .isInstanceOf(VMCachedDeserializable.class); long actualValue = (long) ((CachedDeserializable) actual).getDeserializedForReading(); assertThat(actualValue) .isEqualTo(Long.MAX_VALUE); } @Test public void addressToObjectShouldReturnDecompressedValueFromChunkIfAskedToDecompress() { byte[] data = ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong(Long.MAX_VALUE).array(); RegionEntryContext regionContext = mock(RegionEntryContext.class); CachePerfStats cacheStats = mock(CachePerfStats.class); Compressor compressor = mock(Compressor.class); when(regionContext.getCompressor()) .thenReturn(compressor); when(compressor.decompress(data)) .thenReturn(data); when(regionContext.getCachePerfStats()) .thenReturn(cacheStats); when(cacheStats.startDecompression()) .thenReturn(10000L); MemoryBlock chunk = (MemoryBlock) memoryAllocator.allocateAndInitialize(data, false, true); offHeapRegionEntryHelperInstance = spy(new 
OffHeapRegionEntryHelperInstance(OffHeapStoredObject::new, referenceCounter)); Object actual = offHeapRegionEntryHelperInstance.addressToObject(chunk.getAddress(), true, regionContext); assertThat(actual) .isInstanceOf(byte[].class) .isEqualTo(data); } @Test public void addressToObjectShouldReturnValueFromDataAsAddress() { byte[] data = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, false, false); Object actual = offHeapRegionEntryHelperInstance.addressToObject(address, false, null); TinyStoredObject expected = new TinyStoredObject(address); assertThat(actual) .isInstanceOf(TinyStoredObject.class) .isEqualTo(expected); } @Test public void addressToObjectShouldReturnCachedDeserializableFromSerializedDataAsAddressIfAskedToDecompress() { byte[] data = EntryEventImpl.serialize(Integer.MAX_VALUE); RegionEntryContext regionContext = mock(RegionEntryContext.class); CachePerfStats cacheStats = mock(CachePerfStats.class); Compressor compressor = mock(Compressor.class); when(regionContext.getCompressor()) .thenReturn(compressor); when(compressor.decompress(data)) .thenReturn(data); when(regionContext.getCachePerfStats()) .thenReturn(cacheStats); when(cacheStats.startDecompression()) .thenReturn(10000L); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, true, true); Object actual = offHeapRegionEntryHelperInstance.addressToObject(address, true, regionContext); assertThat(actual) .isInstanceOf(VMCachedDeserializable.class); int actualValue = (int) ((CachedDeserializable) actual).getDeserializedForReading(); assertThat(actualValue) .isEqualTo(Integer.MAX_VALUE); } @Test public void addressToObjectShouldReturnDecompressedValueFromDataAsAddressIfAskedToDecompress() { byte[] data = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); RegionEntryContext regionContext = mock(RegionEntryContext.class); CachePerfStats 
cacheStats = mock(CachePerfStats.class); Compressor compressor = mock(Compressor.class); when(regionContext.getCompressor()) .thenReturn(compressor); when(compressor.decompress(data)) .thenReturn(data); when(regionContext.getCachePerfStats()) .thenReturn(cacheStats); when(cacheStats.startDecompression()) .thenReturn(10000L); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, false, true); Object actual = offHeapRegionEntryHelperInstance.addressToObject(address, true, regionContext); assertThat(actual) .isInstanceOf(byte[].class) .isEqualTo(data); } @Test public void addressToObjectShouldReturnToken() { Token token = (Token) offHeapRegionEntryHelperInstance .addressToObject(NULL_ADDRESS, false, null); assertThat(token).isNull(); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(INVALID_ADDRESS, false, null); assertThat(token).isEqualTo(Token.INVALID); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(LOCAL_INVALID_ADDRESS, false, null); assertThat(token).isEqualTo(Token.LOCAL_INVALID); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(DESTROYED_ADDRESS, false, null); assertThat(token).isEqualTo(Token.DESTROYED); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(REMOVED_PHASE1_ADDRESS, false, null); assertThat(token).isEqualTo(Token.REMOVED_PHASE1); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(REMOVED_PHASE2_ADDRESS, false, null); assertThat(token).isEqualTo(Token.REMOVED_PHASE2); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(END_OF_STREAM_ADDRESS, false, null); assertThat(token).isEqualTo(Token.END_OF_STREAM); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(NOT_AVAILABLE_ADDRESS, false, null); assertThat(token).isEqualTo(Token.NOT_AVAILABLE); token = (Token) offHeapRegionEntryHelperInstance .addressToObject(TOMBSTONE_ADDRESS, false, null); assertThat(token).isEqualTo(Token.TOMBSTONE); } @Test public void 
getSerializedLengthFromDataAsAddressShouldReturnValidLength() { byte[] data = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(Integer.MAX_VALUE).array(); long address = offHeapRegionEntryHelperInstance.encodeDataAsAddress(data, false, true); TinyStoredObject tinyStoredObject = new TinyStoredObject(address); int actualLength = offHeapRegionEntryHelperInstance.getSerializedLength(tinyStoredObject); assertThat(actualLength) .isEqualTo(data.length); } @Test public void getSerializedLengthFromDataAsAddressShouldReturnZeroForNonEncodedAddress() { TinyStoredObject nonEncodedAddress = new TinyStoredObject(100000L); int actualLength = offHeapRegionEntryHelperInstance.getSerializedLength(nonEncodedAddress); assertThat(actualLength) .isZero(); } @Test public void releaseEntryShouldSetValueToRemovePhase2() { OffHeapRegionEntry regionEntry = mock(OffHeapRegionEntry.class); when(regionEntry.getAddress()) .thenReturn(1L); when(regionEntry.setAddress(1L, REMOVED_PHASE2_ADDRESS)) .thenReturn(Boolean.TRUE); offHeapRegionEntryHelperInstance.releaseEntry(regionEntry); verify(offHeapRegionEntryHelperInstance) .setValue(regionEntry, Token.REMOVED_PHASE2); } @Test public void releaseEntryShouldSetValueToRemovePhase2AndSetsAsyncToFalseForDiskEntry() { OffHeapRegionEntry regionEntry = mock(VersionedStatsDiskRegionEntryOffHeap.class); DiskId diskId = spy(DiskId.class); when(regionEntry.getAddress()) .thenReturn(1L); when(regionEntry.setAddress(1L, REMOVED_PHASE2_ADDRESS)) .thenReturn(Boolean.TRUE); when(((DiskEntry) regionEntry).getDiskId()) .thenReturn(diskId); when(diskId.isPendingAsync()) .thenReturn(Boolean.TRUE); offHeapRegionEntryHelperInstance.releaseEntry(regionEntry); verify(offHeapRegionEntryHelperInstance) .setValue(regionEntry, Token.REMOVED_PHASE2); verify(diskId) .setPendingAsync(Boolean.FALSE); } @Test public void doWithOffHeapClearShouldSetTheThreadLocalToTrue() { // verify that threadlocal is not set 
assertThat(OffHeapClearRequired.doesClearNeedToCheckForOffHeap()).isFalse(); OffHeapClearRequired.doWithOffHeapClear(() -> { // verify that threadlocal is set when offheap is cleared assertThat(OffHeapClearRequired.doesClearNeedToCheckForOffHeap()).isTrue(); }); // verify that threadlocal is reset after offheap is cleared assertThat(OffHeapClearRequired.doesClearNeedToCheckForOffHeap()).isFalse(); } private OffHeapStoredObject createChunk(Object value) { byte[] bytes = EntryEventImpl.serialize(value); StoredObject chunk = memoryAllocator.allocateAndInitialize(bytes, true, false); return (OffHeapStoredObject) chunk; } private static void assertSerializedAndCompressedBits(long encodedAddress, boolean shouldSerializedBitBeSet, boolean shouldCompressedBitBeSet) { boolean isSerializedBitSet = (encodedAddress & SERIALIZED_BIT) == SERIALIZED_BIT; assertThat(isSerializedBitSet) .isEqualTo(shouldSerializedBitBeSet); boolean isCompressedBitSet = (encodedAddress & COMPRESSED_BIT) == COMPRESSED_BIT; assertThat(isCompressedBitSet) .isEqualTo(shouldCompressedBitBeSet); } }
apache/poi
35,595
poi-ooxml/src/main/java/org/apache/poi/xssf/usermodel/XSSFSimpleShape.java
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.xssf.usermodel; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Optional; import java.util.Spliterator; import java.util.function.Function; import java.util.function.Predicate; import org.apache.poi.hssf.util.HSSFColor; import org.apache.poi.ss.usermodel.SimpleShape; import org.apache.poi.ss.usermodel.VerticalAlignment; import org.apache.poi.util.Beta; import org.apache.poi.util.Internal; import org.apache.poi.xddf.usermodel.XDDFColor; import org.apache.poi.xddf.usermodel.XDDFColorRgbBinary; import org.apache.poi.xddf.usermodel.XDDFFillProperties; import org.apache.poi.xddf.usermodel.XDDFSolidFillProperties; import org.apache.poi.xddf.usermodel.text.TextContainer; import org.apache.poi.xddf.usermodel.text.XDDFRunProperties; import org.apache.poi.xddf.usermodel.text.XDDFTextBody; import org.apache.poi.xddf.usermodel.text.XDDFTextParagraph; import org.openxmlformats.schemas.drawingml.x2006.main.CTNonVisualDrawingProps; import org.openxmlformats.schemas.drawingml.x2006.main.CTPoint2D; import 
org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveSize2D; import org.openxmlformats.schemas.drawingml.x2006.main.CTPresetGeometry2D; import org.openxmlformats.schemas.drawingml.x2006.main.CTRegularTextRun; import org.openxmlformats.schemas.drawingml.x2006.main.CTSRgbColor; import org.openxmlformats.schemas.drawingml.x2006.main.CTShapeProperties; import org.openxmlformats.schemas.drawingml.x2006.main.CTSolidColorFillProperties; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextBody; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextBodyProperties; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextCharacterProperties; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextFont; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextParagraph; import org.openxmlformats.schemas.drawingml.x2006.main.CTTextParagraphProperties; import org.openxmlformats.schemas.drawingml.x2006.main.CTTransform2D; import org.openxmlformats.schemas.drawingml.x2006.main.STShapeType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextAnchoringType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextHorzOverflowType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextUnderlineType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextVertOverflowType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextVerticalType; import org.openxmlformats.schemas.drawingml.x2006.main.STTextWrappingType; import org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTShape; import org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTShapeNonVisual; import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRElt; import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRPrElt; import org.openxmlformats.schemas.spreadsheetml.x2006.main.STUnderlineValues; /** * Represents a shape with a predefined geometry in a SpreadsheetML drawing. 
* Possible shape types are defined in * {@link org.apache.poi.ss.usermodel.ShapeTypes} */ public class XSSFSimpleShape extends XSSFShape implements Iterable<XSSFTextParagraph>, SimpleShape, TextContainer { /** * The text body containing the paragraphs for this shape. */ private final XDDFTextBody _textBody; /** * List of the paragraphs that make up the text in this shape */ private final List<XSSFTextParagraph> _paragraphs; /** * A default instance of CTShape used for creating new shapes. */ private static CTShape prototype; /** * Xml bean that stores properties of this shape */ private CTShape ctShape; protected XSSFSimpleShape(XSSFDrawing drawing, CTShape ctShape) { this.drawing = drawing; this.ctShape = ctShape; _paragraphs = new ArrayList<>(); // initialize any existing paragraphs - this will be the default body // paragraph in a new shape, // or existing paragraphs that have been loaded from the file CTTextBody body = ctShape.getTxBody(); if (body == null) { _textBody = null; } else { _textBody = new XDDFTextBody(this, body); for (int i = 0; i < body.sizeOfPArray(); i++) { _paragraphs.add(new XSSFTextParagraph(body.getPArray(i), ctShape)); } } } /** * Prototype with the default structure of a new auto-shape. 
*/ protected static CTShape prototype() { if (prototype == null) { CTShape shape = CTShape.Factory.newInstance(); CTShapeNonVisual nv = shape.addNewNvSpPr(); CTNonVisualDrawingProps nvp = nv.addNewCNvPr(); nvp.setId(1); nvp.setName("Shape 1"); nv.addNewCNvSpPr(); CTShapeProperties sp = shape.addNewSpPr(); CTTransform2D t2d = sp.addNewXfrm(); CTPositiveSize2D p1 = t2d.addNewExt(); p1.setCx(0); p1.setCy(0); CTPoint2D p2 = t2d.addNewOff(); p2.setX(0); p2.setY(0); CTPresetGeometry2D geom = sp.addNewPrstGeom(); geom.setPrst(STShapeType.RECT); geom.addNewAvLst(); XDDFTextBody body = new XDDFTextBody(null, shape.addNewTxBody()); XDDFTextParagraph p = body.initialize(); XDDFRunProperties rp = p.getAfterLastRunProperties(); XDDFColor black = new XDDFColorRgbBinary(new byte[] { 0, 0, 0 }); XDDFFillProperties fp = new XDDFSolidFillProperties(black); rp.setFillProperties(fp); prototype = shape; } return prototype; } @Internal public CTShape getCTShape() { return ctShape; } @Beta public XDDFTextBody getTextBody() { return _textBody; } protected void setXfrm(CTTransform2D t2d) { ctShape.getSpPr().setXfrm(t2d); } @Override public Iterator<XSSFTextParagraph> iterator() { return _paragraphs.iterator(); } /** * @since POI 5.2.0 */ @Override public Spliterator<XSSFTextParagraph> spliterator() { return _paragraphs.spliterator(); } /** * Returns the text from all paragraphs in the shape. Paragraphs are * separated by new lines. 
* * @return text contained within this shape or empty string */ public String getText() { final int MAX_LEVELS = 9; StringBuilder out = new StringBuilder(); List<Integer> levelCount = new ArrayList<>(MAX_LEVELS); // maximum 9 // levels // initialise the levelCount array - this maintains a record of the // numbering to be used at each level for (int k = 0; k < MAX_LEVELS; k++) { levelCount.add(0); } for (int i = 0; i < _paragraphs.size(); i++) { if (out.length() > 0) { out.append('\n'); } XSSFTextParagraph p = _paragraphs.get(i); final String pText = p.getText(); if (p.isBullet() && !pText.isEmpty()) { int level = Math.min(p.getLevel(), MAX_LEVELS - 1); if (p.isBulletAutoNumber()) { i = processAutoNumGroup(i, level, levelCount, out); } else { // indent appropriately for the level for (int j = 0; j < level; j++) { out.append('\t'); } String character = p.getBulletCharacter(); out.append(!character.isEmpty() ? character + " " : "- "); out.append(pText); } } else { out.append(pText); // this paragraph is not a bullet, so reset the count array for (int k = 0; k < MAX_LEVELS; k++) { levelCount.set(k, 0); } } } return out.toString(); } /** * */ private int processAutoNumGroup(int index, int level, List<Integer> levelCount, StringBuilder out) { XSSFTextParagraph p = _paragraphs.get(index); // The rules for generating the auto numbers are as follows. If the // following paragraph is also // an auto-number, has the same type/scheme (and startAt if defined on // this paragraph) then they are // considered part of the same group. An empty bullet paragraph is // counted as part of the same // group but does not increment the count for the group. A change of // type, startAt or the paragraph // not being a bullet resets the count for that level to 1. 
// first auto-number paragraph so initialise to 1 or the bullets startAt // if present int startAt = p.getBulletAutoNumberStart(); ListAutoNumber scheme = p.getBulletAutoNumberScheme(); if (levelCount.get(level) == 0) { levelCount.set(level, startAt == 0 ? 1 : startAt); } // indent appropriately for the level for (int j = 0; j < level; j++) { out.append('\t'); } final String pText = p.getText(); if (!pText.isEmpty()) { out.append(getBulletPrefix(scheme, levelCount.get(level))); out.append(pText); } while (true) { XSSFTextParagraph nextp = (index + 1) == _paragraphs.size() ? null : _paragraphs.get(index + 1); if (nextp == null) { break; // out of paragraphs } if (!(nextp.isBullet() && p.isBulletAutoNumber())) { break; // not an auto-number bullet } if (nextp.getLevel() > level) { // recurse into the new level group if (out.length() > 0) { out.append('\n'); } index = processAutoNumGroup(index + 1, nextp.getLevel(), levelCount, out); continue; // restart the loop given the new index } else if (nextp.getLevel() < level) { break; // changed level } ListAutoNumber nextScheme = nextp.getBulletAutoNumberScheme(); int nextStartAt = nextp.getBulletAutoNumberStart(); if (nextScheme == scheme && nextStartAt == startAt) { // bullet is valid, so increment i ++index; if (out.length() > 0) { out.append('\n'); } // indent for the level for (int j = 0; j < level; j++) { out.append('\t'); } // check for empty text - only output a bullet if there is text, // but it is still part of the group final String npText = nextp.getText(); if (!npText.isEmpty()) { // increment the count for this level levelCount.set(level, levelCount.get(level) + 1); out.append(getBulletPrefix(nextScheme, levelCount.get(level))); out.append(npText); } } else { // something doesn't match so stop break; } } // end of the group so reset the count for this level levelCount.set(level, 0); return index; } /** * Returns a string containing an appropriate prefix for an auto-numbering * bullet * * @param scheme * the 
auto-numbering scheme used by the bullet * @param value * the value of the bullet * @return appropriate prefix for an auto-numbering bullet */ private String getBulletPrefix(ListAutoNumber scheme, int value) { StringBuilder out = new StringBuilder(); switch (scheme) { case ALPHA_LC_PARENT_BOTH: case ALPHA_LC_PARENT_R: if (scheme == ListAutoNumber.ALPHA_LC_PARENT_BOTH) { out.append('('); } out.append(valueToAlpha(value).toLowerCase(Locale.ROOT)); out.append(')'); break; case ALPHA_UC_PARENT_BOTH: case ALPHA_UC_PARENT_R: if (scheme == ListAutoNumber.ALPHA_UC_PARENT_BOTH) { out.append('('); } out.append(valueToAlpha(value)); out.append(')'); break; case ALPHA_LC_PERIOD: out.append(valueToAlpha(value).toLowerCase(Locale.ROOT)); out.append('.'); break; case ALPHA_UC_PERIOD: out.append(valueToAlpha(value)); out.append('.'); break; case ARABIC_PARENT_BOTH: case ARABIC_PARENT_R: if (scheme == ListAutoNumber.ARABIC_PARENT_BOTH) { out.append('('); } out.append(value); out.append(')'); break; case ARABIC_PERIOD: out.append(value); out.append('.'); break; case ARABIC_PLAIN: out.append(value); break; case ROMAN_LC_PARENT_BOTH: case ROMAN_LC_PARENT_R: if (scheme == ListAutoNumber.ROMAN_LC_PARENT_BOTH) { out.append('('); } out.append(valueToRoman(value).toLowerCase(Locale.ROOT)); out.append(')'); break; case ROMAN_UC_PARENT_BOTH: case ROMAN_UC_PARENT_R: if (scheme == ListAutoNumber.ROMAN_UC_PARENT_BOTH) { out.append('('); } out.append(valueToRoman(value)); out.append(')'); break; case ROMAN_LC_PERIOD: out.append(valueToRoman(value).toLowerCase(Locale.ROOT)); out.append('.'); break; case ROMAN_UC_PERIOD: out.append(valueToRoman(value)); out.append('.'); break; default: out.append('\u2022'); // can't set the font to wingdings so use the // default bullet character break; } out.append(" "); return out.toString(); } /** * Convert an integer to its alpha equivalent e.g. 
1 = A, 2 = B, 27 = AA etc */ private String valueToAlpha(int value) { StringBuilder alpha = new StringBuilder(); int modulo; while (value > 0) { modulo = (value - 1) % 26; alpha.append((char) (65 + modulo)); value = (value - modulo) / 26; } alpha.reverse(); return alpha.toString(); } private static String[] _romanChars = new String[] { "M", "CM", "D", "CD", "C", "XC", "L", "XL", "X", "IX", "V", "IV", "I" }; private static int[] _romanAlphaValues = new int[] { 1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1 }; /** * Convert an integer to its roman equivalent e.g. 1 = I, 9 = IX etc */ private String valueToRoman(int value) { StringBuilder out = new StringBuilder(); for (int i = 0; value > 0 && i < _romanChars.length; i++) { while (_romanAlphaValues[i] <= value) { out.append(_romanChars[i]); value -= _romanAlphaValues[i]; } } return out.toString(); } /** * Clear all text from this shape */ public void clearText() { _paragraphs.clear(); CTTextBody txBody = ctShape.getTxBody(); txBody.setPArray(null); // remove any existing paragraphs } /** * Set a single paragraph of text on the shape. Note this will replace all * existing paragraphs created on the shape. * * @param text * string representing the paragraph text */ public void setText(String text) { clearText(); addNewTextParagraph().addNewTextRun().setText(text); } /** * Set a single paragraph of text on the shape. Note this will replace all * existing paragraphs created on the shape. 
* * @param str * rich text string representing the paragraph text */ public void setText(XSSFRichTextString str) { XSSFWorkbook wb = (XSSFWorkbook) getDrawing().getParent().getParent(); str.setStylesTableReference(wb.getStylesSource()); CTTextParagraph p = CTTextParagraph.Factory.newInstance(); if (str.numFormattingRuns() == 0) { CTRegularTextRun r = p.addNewR(); CTTextCharacterProperties rPr = r.addNewRPr(); rPr.setLang("en-US"); rPr.setSz(1100); r.setT(str.getString()); } else { for (int i = 0; i < str.getCTRst().sizeOfRArray(); i++) { CTRElt lt = str.getCTRst().getRArray(i); CTRPrElt ltPr = lt.getRPr(); if (ltPr == null) { ltPr = lt.addNewRPr(); } CTRegularTextRun r = p.addNewR(); CTTextCharacterProperties rPr = r.addNewRPr(); rPr.setLang("en-US"); applyAttributes(ltPr, rPr); r.setT(lt.getT()); } } clearText(); ctShape.getTxBody().setPArray(new CTTextParagraph[] { p }); _paragraphs.add(new XSSFTextParagraph(ctShape.getTxBody().getPArray(0), ctShape)); } /** * Returns a collection of the XSSFTextParagraphs that are attached to this * shape * * @return text paragraphs in this shape */ public List<XSSFTextParagraph> getTextParagraphs() { return _paragraphs; } /** * Add a new paragraph run to this shape * * @return created paragraph run */ public XSSFTextParagraph addNewTextParagraph() { CTTextBody txBody = ctShape.getTxBody(); CTTextParagraph p = txBody.addNewP(); XSSFTextParagraph paragraph = new XSSFTextParagraph(p, ctShape); _paragraphs.add(paragraph); return paragraph; } /** * Add a new paragraph run to this shape, set to the provided string * * @return created paragraph run */ public XSSFTextParagraph addNewTextParagraph(String text) { XSSFTextParagraph paragraph = addNewTextParagraph(); paragraph.addNewTextRun().setText(text); return paragraph; } /** * Add a new paragraph run to this shape, set to the provided rich text * string * * @return created paragraph run */ public XSSFTextParagraph addNewTextParagraph(XSSFRichTextString str) { CTTextBody txBody = 
ctShape.getTxBody(); CTTextParagraph p = txBody.addNewP(); if (str.numFormattingRuns() == 0) { CTRegularTextRun r = p.addNewR(); CTTextCharacterProperties rPr = r.addNewRPr(); rPr.setLang("en-US"); rPr.setSz(1100); r.setT(str.getString()); } else { for (int i = 0; i < str.getCTRst().sizeOfRArray(); i++) { CTRElt lt = str.getCTRst().getRArray(i); CTRPrElt ltPr = lt.getRPr(); if (ltPr == null) { ltPr = lt.addNewRPr(); } CTRegularTextRun r = p.addNewR(); CTTextCharacterProperties rPr = r.addNewRPr(); rPr.setLang("en-US"); applyAttributes(ltPr, rPr); r.setT(lt.getT()); } } // Note: the XSSFTextParagraph constructor will create its required // XSSFTextRuns from the provided CTTextParagraph XSSFTextParagraph paragraph = new XSSFTextParagraph(p, ctShape); _paragraphs.add(paragraph); return paragraph; } /** * Sets the type of horizontal overflow for the text. * * @param overflow * - the type of horizontal overflow. A <code>null</code> values * unsets this property. */ public void setTextHorizontalOverflow(TextHorizontalOverflow overflow) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (overflow == null) { if (bodyPr.isSetHorzOverflow()) { bodyPr.unsetHorzOverflow(); } } else { bodyPr.setHorzOverflow(STTextHorzOverflowType.Enum.forInt(overflow.ordinal() + 1)); } } } /** * Returns the type of horizontal overflow for the text. * * @return the type of horizontal overflow */ public TextHorizontalOverflow getTextHorizontalOverflow() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetHorzOverflow()) { return TextHorizontalOverflow.values()[bodyPr.getHorzOverflow().intValue() - 1]; } } return TextHorizontalOverflow.OVERFLOW; } /** * Sets the type of vertical overflow for the text. * * @param overflow * - the type of vertical overflow. A <code>null</code> values * unsets this property. 
*/ public void setTextVerticalOverflow(TextVerticalOverflow overflow) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (overflow == null) { if (bodyPr.isSetVertOverflow()) { bodyPr.unsetVertOverflow(); } } else { bodyPr.setVertOverflow(STTextVertOverflowType.Enum.forInt(overflow.ordinal() + 1)); } } } /** * Returns the type of vertical overflow for the text. * * @return the type of vertical overflow */ public TextVerticalOverflow getTextVerticalOverflow() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetVertOverflow()) { return TextVerticalOverflow.values()[bodyPr.getVertOverflow().intValue() - 1]; } } return TextVerticalOverflow.OVERFLOW; } /** * Sets the type of vertical alignment for the text within the shape. * * @param anchor * - the type of alignment. A <code>null</code> values unsets * this property. */ public void setVerticalAlignment(VerticalAlignment anchor) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (anchor == null) { if (bodyPr.isSetAnchor()) { bodyPr.unsetAnchor(); } } else { bodyPr.setAnchor(STTextAnchoringType.Enum.forInt(anchor.ordinal() + 1)); } } } /** * Returns the type of vertical alignment for the text within the shape. * * @return the type of vertical alignment */ public VerticalAlignment getVerticalAlignment() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetAnchor()) { return VerticalAlignment.values()[bodyPr.getAnchor().intValue() - 1]; } } return VerticalAlignment.TOP; } /** * Sets the vertical orientation of the text * * @param orientation * vertical orientation of the text A <code>null</code> values * unsets this property. 
*/ public void setTextDirection(TextDirection orientation) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (orientation == null) { if (bodyPr.isSetVert()) { bodyPr.unsetVert(); } } else { bodyPr.setVert(STTextVerticalType.Enum.forInt(orientation.ordinal() + 1)); } } } /** * Gets the vertical orientation of the text * * @return vertical orientation of the text */ public TextDirection getTextDirection() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { STTextVerticalType.Enum val = bodyPr.getVert(); if (val != null) { return TextDirection.values()[val.intValue() - 1]; } } return TextDirection.HORIZONTAL; } /** * Returns the distance (in points) between the bottom of the text frame and * the bottom of the inscribed rectangle of the shape that contains the * text. * * @return the bottom inset in points */ public double getBottomInset() { Double inset = _textBody.getBodyProperties().getBottomInset(); if (inset == null) { // If this attribute is omitted, then a value of 0.05 inches is // implied return 3.6; } else { return inset; } } /** * Returns the distance (in points) between the left edge of the text frame * and the left edge of the inscribed rectangle of the shape that contains * the text. * * @return the left inset in points */ public double getLeftInset() { Double inset = _textBody.getBodyProperties().getLeftInset(); if (inset == null) { // If this attribute is omitted, then a value of 0.05 inches is // implied return 3.6; } else { return inset; } } /** * Returns the distance (in points) between the right edge of the text frame * and the right edge of the inscribed rectangle of the shape that contains * the text. 
* * @return the right inset in points */ public double getRightInset() { Double inset = _textBody.getBodyProperties().getRightInset(); if (inset == null) { // If this attribute is omitted, then a value of 0.05 inches is // implied return 3.6; } else { return inset; } } /** * Returns the distance (in points) between the top of the text frame and * the top of the inscribed rectangle of the shape that contains the text. * * @return the top inset in points */ public double getTopInset() { Double inset = _textBody.getBodyProperties().getTopInset(); if (inset == null) { // If this attribute is omitted, then a value of 0.05 inches is // implied return 3.6; } else { return inset; } } /** * Sets the bottom inset. * * @see #getBottomInset() * * @param margin * the bottom margin */ public void setBottomInset(double margin) { if (margin == -1) { _textBody.getBodyProperties().setBottomInset(null); } else { _textBody.getBodyProperties().setBottomInset(margin); } } /** * Sets the left inset. * * @see #getLeftInset() * * @param margin * the left margin */ public void setLeftInset(double margin) { if (margin == -1) { _textBody.getBodyProperties().setLeftInset(null); } else { _textBody.getBodyProperties().setLeftInset(margin); } } /** * Sets the right inset. * * @see #getRightInset() * * @param margin * the right margin */ public void setRightInset(double margin) { if (margin == -1) { _textBody.getBodyProperties().setRightInset(null); } else { _textBody.getBodyProperties().setRightInset(margin); } } /** * Sets the top inset. 
* * @see #getTopInset() * * @param margin * the top margin */ public void setTopInset(double margin) { if (margin == -1) { _textBody.getBodyProperties().setTopInset(null); } else { _textBody.getBodyProperties().setTopInset(margin); } } /** * @return whether to wrap words within the bounding rectangle */ public boolean getWordWrap() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetWrap()) { return bodyPr.getWrap() == STTextWrappingType.SQUARE; } } return true; } /** * * @param wrap * whether to wrap words within the bounding rectangle */ public void setWordWrap(boolean wrap) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { bodyPr.setWrap(wrap ? STTextWrappingType.SQUARE : STTextWrappingType.NONE); } } /** * * Specifies that a shape should be auto-fit to fully contain the text * described within it. Auto-fitting is when text within a shape is scaled * in order to contain all the text inside * * @param value * type of autofit */ public void setTextAutofit(TextAutofit value) { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetSpAutoFit()) { bodyPr.unsetSpAutoFit(); } if (bodyPr.isSetNoAutofit()) { bodyPr.unsetNoAutofit(); } if (bodyPr.isSetNormAutofit()) { bodyPr.unsetNormAutofit(); } switch (value) { case NONE: bodyPr.addNewNoAutofit(); break; case NORMAL: bodyPr.addNewNormAutofit(); break; case SHAPE: bodyPr.addNewSpAutoFit(); break; } } } /** * * @return type of autofit */ public TextAutofit getTextAutofit() { CTTextBodyProperties bodyPr = ctShape.getTxBody().getBodyPr(); if (bodyPr != null) { if (bodyPr.isSetNoAutofit()) { return TextAutofit.NONE; } else if (bodyPr.isSetNormAutofit()) { return TextAutofit.NORMAL; } else if (bodyPr.isSetSpAutoFit()) { return TextAutofit.SHAPE; } } return TextAutofit.NORMAL; } /** * Gets the shape type, one of the constants defined in * {@link org.apache.poi.ss.usermodel.ShapeTypes}. 
* * @return the shape type * @see org.apache.poi.ss.usermodel.ShapeTypes */ public int getShapeType() { return ctShape.getSpPr().getPrstGeom().getPrst().intValue(); } /** * Sets the shape types. * * @param type * the shape type, one of the constants defined in * {@link org.apache.poi.ss.usermodel.ShapeTypes}. * @see org.apache.poi.ss.usermodel.ShapeTypes */ public void setShapeType(int type) { ctShape.getSpPr().getPrstGeom().setPrst(STShapeType.Enum.forInt(type)); } @Override protected CTShapeProperties getShapeProperties() { return ctShape.getSpPr(); } /** * org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRPrElt to * org.openxmlformats.schemas.drawingml.x2006.main.CTFont adapter */ private static void applyAttributes(CTRPrElt pr, CTTextCharacterProperties rPr) { if (pr.sizeOfBArray() > 0) { rPr.setB(pr.getBArray(0).getVal()); } if (pr.sizeOfUArray() > 0) { STUnderlineValues.Enum u1 = pr.getUArray(0).getVal(); if (u1 == STUnderlineValues.SINGLE) { rPr.setU(STTextUnderlineType.SNG); } else if (u1 == STUnderlineValues.DOUBLE) { rPr.setU(STTextUnderlineType.DBL); } else if (u1 == STUnderlineValues.NONE) { rPr.setU(STTextUnderlineType.NONE); } } if (pr.sizeOfIArray() > 0) { rPr.setI(pr.getIArray(0).getVal()); } if (pr.sizeOfRFontArray() > 0) { CTTextFont rFont = rPr.isSetLatin() ? rPr.getLatin() : rPr.addNewLatin(); rFont.setTypeface(pr.getRFontArray(0).getVal()); } if (pr.sizeOfSzArray() > 0) { int sz = (int) (pr.getSzArray(0).getVal() * 100); rPr.setSz(sz); } if (pr.sizeOfColorArray() > 0) { CTSolidColorFillProperties fill = rPr.isSetSolidFill() ? rPr.getSolidFill() : rPr.addNewSolidFill(); org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColor xlsColor = pr.getColorArray(0); if (xlsColor.isSetRgb()) { CTSRgbColor clr = fill.isSetSrgbClr() ? 
fill.getSrgbClr() : fill.addNewSrgbClr(); clr.setVal(xlsColor.getRgb()); } else if (xlsColor.isSetIndexed()) { HSSFColor indexed = HSSFColor.getIndexHash().get((int) xlsColor.getIndexed()); if (indexed != null) { byte[] rgb = new byte[3]; rgb[0] = (byte) indexed.getTriplet()[0]; rgb[1] = (byte) indexed.getTriplet()[1]; rgb[2] = (byte) indexed.getTriplet()[2]; CTSRgbColor clr = fill.isSetSrgbClr() ? fill.getSrgbClr() : fill.addNewSrgbClr(); clr.setVal(rgb); } } } } @Override public String getShapeName() { return ctShape.getNvSpPr().getCNvPr().getName(); } @Override public int getShapeId() { return (int) ctShape.getNvSpPr().getCNvPr().getId(); } @Override public <R> Optional<R> findDefinedParagraphProperty(Predicate<CTTextParagraphProperties> isSet, Function<CTTextParagraphProperties, R> getter) { // TODO Auto-generated method stub return Optional.empty(); } @Override public <R> Optional<R> findDefinedRunProperty(Predicate<CTTextCharacterProperties> isSet, Function<CTTextCharacterProperties, R> getter) { // TODO Auto-generated method stub return Optional.empty(); } }
googleapis/google-cloud-java
35,376
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/ConsistentHashLoadBalancerSettingsHttpCookie.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * The information about the HTTP Cookie on which the hash function is based for load balancing policies that use a consistent hash. * </pre> * * Protobuf type {@code google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie} */ public final class ConsistentHashLoadBalancerSettingsHttpCookie extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) ConsistentHashLoadBalancerSettingsHttpCookieOrBuilder { private static final long serialVersionUID = 0L; // Use ConsistentHashLoadBalancerSettingsHttpCookie.newBuilder() to construct. 
private ConsistentHashLoadBalancerSettingsHttpCookie( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConsistentHashLoadBalancerSettingsHttpCookie() { name_ = ""; path_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ConsistentHashLoadBalancerSettingsHttpCookie(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ConsistentHashLoadBalancerSettingsHttpCookie_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ConsistentHashLoadBalancerSettingsHttpCookie_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie.class, com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 3373707; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return Whether the name field is set. */ @java.lang.Override public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Name of the cookie. 
* </pre> * * <code>optional string name = 3373707;</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PATH_FIELD_NUMBER = 3433509; @SuppressWarnings("serial") private volatile java.lang.Object path_ = ""; /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return Whether the path field is set. */ @java.lang.Override public boolean hasPath() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return The path. */ @java.lang.Override public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return The bytes for path. */ @java.lang.Override public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TTL_FIELD_NUMBER = 115180; private com.google.cloud.compute.v1.Duration ttl_; /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> * * @return Whether the ttl field is set. 
*/ @java.lang.Override public boolean hasTtl() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> * * @return The ttl. */ @java.lang.Override public com.google.cloud.compute.v1.Duration getTtl() { return ttl_ == null ? com.google.cloud.compute.v1.Duration.getDefaultInstance() : ttl_; } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ @java.lang.Override public com.google.cloud.compute.v1.DurationOrBuilder getTtlOrBuilder() { return ttl_ == null ? com.google.cloud.compute.v1.Duration.getDefaultInstance() : ttl_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(115180, getTtl()); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3373707, name_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3433509, path_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(115180, getTtl()); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3373707, name_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3433509, path_); } size += getUnknownFields().getSerializedSize(); memoizedSize 
= size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie)) { return super.equals(obj); } com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie other = (com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) obj; if (hasName() != other.hasName()) return false; if (hasName()) { if (!getName().equals(other.getName())) return false; } if (hasPath() != other.hasPath()) return false; if (hasPath()) { if (!getPath().equals(other.getPath())) return false; } if (hasTtl() != other.hasTtl()) return false; if (hasTtl()) { if (!getTtl().equals(other.getTtl())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasPath()) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); } if (hasTtl()) { hash = (37 * hash) + TTL_FIELD_NUMBER; hash = (53 * hash) + getTtl().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The information about the HTTP Cookie on which the hash function is based for load balancing policies that use a consistent hash. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookieOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ConsistentHashLoadBalancerSettingsHttpCookie_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ConsistentHashLoadBalancerSettingsHttpCookie_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie.class, com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie.Builder .class); } // Construct using // com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTtlFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; path_ = ""; ttl_ = null; if (ttlBuilder_ != null) { ttlBuilder_.dispose(); ttlBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ConsistentHashLoadBalancerSettingsHttpCookie_descriptor; } 
@java.lang.Override public com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie getDefaultInstanceForType() { return com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie build() { com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie buildPartial() { com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie result = new com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.path_ = path_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.ttl_ = ttlBuilder_ == null ? 
ttl_ : ttlBuilder_.build(); to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) { return mergeFrom( (com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie other) { if (other == com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie .getDefaultInstance()) return this; if (other.hasName()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasPath()) { path_ = other.path_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasTtl()) { mergeTtl(other.getTtl()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 921442: { input.readMessage(getTtlFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 921442 case 26989658: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 26989658 case 27468074: { path_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 27468074 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return Whether the name field is set. */ public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return The bytes for name. 
*/ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of the cookie. * </pre> * * <code>optional string name = 3373707;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object path_ = ""; /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return Whether the path field is set. */ public boolean hasPath() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return The path. 
*/ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return The bytes for path. */ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @param value The path to set. * @return This builder for chaining. */ public Builder setPath(java.lang.String value) { if (value == null) { throw new NullPointerException(); } path_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @return This builder for chaining. */ public Builder clearPath() { path_ = getDefaultInstance().getPath(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Path to set for the cookie. * </pre> * * <code>optional string path = 3433509;</code> * * @param value The bytes for path to set. * @return This builder for chaining. 
*/ public Builder setPathBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); path_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.compute.v1.Duration ttl_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Duration, com.google.cloud.compute.v1.Duration.Builder, com.google.cloud.compute.v1.DurationOrBuilder> ttlBuilder_; /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> * * @return Whether the ttl field is set. */ public boolean hasTtl() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> * * @return The ttl. */ public com.google.cloud.compute.v1.Duration getTtl() { if (ttlBuilder_ == null) { return ttl_ == null ? com.google.cloud.compute.v1.Duration.getDefaultInstance() : ttl_; } else { return ttlBuilder_.getMessage(); } } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public Builder setTtl(com.google.cloud.compute.v1.Duration value) { if (ttlBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ttl_ = value; } else { ttlBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public Builder setTtl(com.google.cloud.compute.v1.Duration.Builder builderForValue) { if (ttlBuilder_ == null) { ttl_ = builderForValue.build(); } else { ttlBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Lifetime of the cookie. 
* </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public Builder mergeTtl(com.google.cloud.compute.v1.Duration value) { if (ttlBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && ttl_ != null && ttl_ != com.google.cloud.compute.v1.Duration.getDefaultInstance()) { getTtlBuilder().mergeFrom(value); } else { ttl_ = value; } } else { ttlBuilder_.mergeFrom(value); } if (ttl_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public Builder clearTtl() { bitField0_ = (bitField0_ & ~0x00000004); ttl_ = null; if (ttlBuilder_ != null) { ttlBuilder_.dispose(); ttlBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public com.google.cloud.compute.v1.Duration.Builder getTtlBuilder() { bitField0_ |= 0x00000004; onChanged(); return getTtlFieldBuilder().getBuilder(); } /** * * * <pre> * Lifetime of the cookie. * </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ public com.google.cloud.compute.v1.DurationOrBuilder getTtlOrBuilder() { if (ttlBuilder_ != null) { return ttlBuilder_.getMessageOrBuilder(); } else { return ttl_ == null ? com.google.cloud.compute.v1.Duration.getDefaultInstance() : ttl_; } } /** * * * <pre> * Lifetime of the cookie. 
* </pre> * * <code>optional .google.cloud.compute.v1.Duration ttl = 115180;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Duration, com.google.cloud.compute.v1.Duration.Builder, com.google.cloud.compute.v1.DurationOrBuilder> getTtlFieldBuilder() { if (ttlBuilder_ == null) { ttlBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Duration, com.google.cloud.compute.v1.Duration.Builder, com.google.cloud.compute.v1.DurationOrBuilder>( getTtl(), getParentForChildren(), isClean()); ttl_ = null; } return ttlBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie) private static final com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie(); } public static com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConsistentHashLoadBalancerSettingsHttpCookie> PARSER = new com.google.protobuf.AbstractParser<ConsistentHashLoadBalancerSettingsHttpCookie>() { @java.lang.Override public ConsistentHashLoadBalancerSettingsHttpCookie parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { 
builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConsistentHashLoadBalancerSettingsHttpCookie> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConsistentHashLoadBalancerSettingsHttpCookie> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.ConsistentHashLoadBalancerSettingsHttpCookie getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,422
java-oracledatabase/proto-google-cloud-oracledatabase-v1/src/main/java/com/google/cloud/oracledatabase/v1/ListCloudVmClustersRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/oracledatabase/v1/oracledatabase.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.oracledatabase.v1; /** * * * <pre> * The request for `CloudVmCluster.List`. * </pre> * * Protobuf type {@code google.cloud.oracledatabase.v1.ListCloudVmClustersRequest} */ public final class ListCloudVmClustersRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) ListCloudVmClustersRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListCloudVmClustersRequest.newBuilder() to construct. 
private ListCloudVmClustersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCloudVmClustersRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCloudVmClustersRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListCloudVmClustersRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListCloudVmClustersRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.class, com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. The number of VM clusters to return. * If unspecified, at most 50 VM clusters will be returned. * The maximum value is 1,000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest)) { return super.equals(obj); } com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest other = (com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request for `CloudVmCluster.List`. 
* </pre> * * Protobuf type {@code google.cloud.oracledatabase.v1.ListCloudVmClustersRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListCloudVmClustersRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListCloudVmClustersRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.class, com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.Builder.class); } // Construct using com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.oracledatabase.v1.V1mainProto .internal_static_google_cloud_oracledatabase_v1_ListCloudVmClustersRequest_descriptor; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest getDefaultInstanceForType() { return com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest build() { 
com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest buildPartial() { com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest result = new com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) { return mergeFrom((com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest other) { if (other == com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // 
finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the parent in the following format: * projects/{project}/locations/{location}. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The number of VM clusters to return. * If unspecified, at most 50 VM clusters will be returned. * The maximum value is 1,000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The number of VM clusters to return. * If unspecified, at most 50 VM clusters will be returned. * The maximum value is 1,000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The number of VM clusters to return. * If unspecified, at most 50 VM clusters will be returned. * The maximum value is 1,000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A token identifying the page of results the server returns. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. An expression for filtering the results of the request. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) } // @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.ListCloudVmClustersRequest) private static final com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest(); } public static com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCloudVmClustersRequest> PARSER = new 
com.google.protobuf.AbstractParser<ListCloudVmClustersRequest>() { @java.lang.Override public ListCloudVmClustersRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCloudVmClustersRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCloudVmClustersRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.oracledatabase.v1.ListCloudVmClustersRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,769
java-valkey/google-cloud-valkey/src/main/java/com/google/cloud/memorystore/v1beta/stub/MemorystoreStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.memorystore.v1beta.stub; import static com.google.cloud.memorystore.v1beta.MemorystoreClient.ListInstancesPagedResponse; import static com.google.cloud.memorystore.v1beta.MemorystoreClient.ListLocationsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.httpjson.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import 
com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.memorystore.v1beta.CertificateAuthority; import com.google.cloud.memorystore.v1beta.CreateInstanceRequest; import com.google.cloud.memorystore.v1beta.DeleteInstanceRequest; import com.google.cloud.memorystore.v1beta.GetCertificateAuthorityRequest; import com.google.cloud.memorystore.v1beta.GetInstanceRequest; import com.google.cloud.memorystore.v1beta.Instance; import com.google.cloud.memorystore.v1beta.ListInstancesRequest; import com.google.cloud.memorystore.v1beta.ListInstancesResponse; import com.google.cloud.memorystore.v1beta.OperationMetadata; import com.google.cloud.memorystore.v1beta.UpdateInstanceRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link MemorystoreStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (memorystore.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. 
* </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getInstance: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MemorystoreStubSettings.Builder memorystoreSettingsBuilder = * MemorystoreStubSettings.newBuilder(); * memorystoreSettingsBuilder * .getInstanceSettings() * .setRetrySettings( * memorystoreSettingsBuilder * .getInstanceSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * MemorystoreStubSettings memorystoreSettings = memorystoreSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. 
For example, to * configure the RetrySettings for createInstance: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MemorystoreStubSettings.Builder memorystoreSettingsBuilder = * MemorystoreStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * memorystoreSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @BetaApi @Generated("by gapic-generator-java") public class MemorystoreStubSettings extends StubSettings<MemorystoreStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); private final PagedCallSettings< ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse> listInstancesSettings; private final UnaryCallSettings<GetInstanceRequest, Instance> getInstanceSettings; private final UnaryCallSettings<CreateInstanceRequest, Operation> createInstanceSettings; private final OperationCallSettings<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationSettings; private final UnaryCallSettings<UpdateInstanceRequest, Operation> updateInstanceSettings; private final OperationCallSettings<UpdateInstanceRequest, Instance, OperationMetadata> updateInstanceOperationSettings; private final UnaryCallSettings<DeleteInstanceRequest, Operation> deleteInstanceSettings; private final OperationCallSettings<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationSettings; private final UnaryCallSettings<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthoritySettings; private final PagedCallSettings< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings; private static final PagedListDescriptor<ListInstancesRequest, ListInstancesResponse, Instance> LIST_INSTANCES_PAGE_STR_DESC = new PagedListDescriptor<ListInstancesRequest, ListInstancesResponse, Instance>() { @Override public String emptyToken() { return ""; } @Override public ListInstancesRequest injectToken(ListInstancesRequest payload, String token) { return ListInstancesRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListInstancesRequest injectPageSize(ListInstancesRequest payload, int pageSize) { return ListInstancesRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer 
extractPageSize(ListInstancesRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListInstancesResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Instance> extractResources(ListInstancesResponse payload) { return payload.getInstancesList(); } }; private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location> LIST_LOCATIONS_PAGE_STR_DESC = new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() { @Override public String emptyToken() { return ""; } @Override public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) { return ListLocationsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) { return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListLocationsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListLocationsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Location> extractResources(ListLocationsResponse payload) { return payload.getLocationsList(); } }; private static final PagedListResponseFactory< ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse> LIST_INSTANCES_PAGE_STR_FACT = new PagedListResponseFactory< ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse>() { @Override public ApiFuture<ListInstancesPagedResponse> getFuturePagedResponse( UnaryCallable<ListInstancesRequest, ListInstancesResponse> callable, ListInstancesRequest request, ApiCallContext context, ApiFuture<ListInstancesResponse> futureResponse) { PageContext<ListInstancesRequest, ListInstancesResponse, Instance> pageContext = PageContext.create(callable, LIST_INSTANCES_PAGE_STR_DESC, request, context); return 
ListInstancesPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> LIST_LOCATIONS_PAGE_STR_FACT = new PagedListResponseFactory< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() { @Override public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse( UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable, ListLocationsRequest request, ApiCallContext context, ApiFuture<ListLocationsResponse> futureResponse) { PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext = PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context); return ListLocationsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to listInstances. */ public PagedCallSettings<ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse> listInstancesSettings() { return listInstancesSettings; } /** Returns the object with the settings used for calls to getInstance. */ public UnaryCallSettings<GetInstanceRequest, Instance> getInstanceSettings() { return getInstanceSettings; } /** Returns the object with the settings used for calls to createInstance. */ public UnaryCallSettings<CreateInstanceRequest, Operation> createInstanceSettings() { return createInstanceSettings; } /** Returns the object with the settings used for calls to createInstance. */ public OperationCallSettings<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationSettings() { return createInstanceOperationSettings; } /** Returns the object with the settings used for calls to updateInstance. */ public UnaryCallSettings<UpdateInstanceRequest, Operation> updateInstanceSettings() { return updateInstanceSettings; } /** Returns the object with the settings used for calls to updateInstance. 
*/ public OperationCallSettings<UpdateInstanceRequest, Instance, OperationMetadata> updateInstanceOperationSettings() { return updateInstanceOperationSettings; } /** Returns the object with the settings used for calls to deleteInstance. */ public UnaryCallSettings<DeleteInstanceRequest, Operation> deleteInstanceSettings() { return deleteInstanceSettings; } /** Returns the object with the settings used for calls to deleteInstance. */ public OperationCallSettings<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationSettings() { return deleteInstanceOperationSettings; } /** Returns the object with the settings used for calls to getCertificateAuthority. */ public UnaryCallSettings<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthoritySettings() { return getCertificateAuthoritySettings; } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } public MemorystoreStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonMemorystoreStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "memorystore"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. 
*/ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "memorystore.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "memorystore.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultHttpJsonTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(MemorystoreStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected MemorystoreStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); listInstancesSettings = settingsBuilder.listInstancesSettings().build(); getInstanceSettings = settingsBuilder.getInstanceSettings().build(); createInstanceSettings = settingsBuilder.createInstanceSettings().build(); createInstanceOperationSettings = settingsBuilder.createInstanceOperationSettings().build(); updateInstanceSettings = settingsBuilder.updateInstanceSettings().build(); updateInstanceOperationSettings = settingsBuilder.updateInstanceOperationSettings().build(); deleteInstanceSettings = settingsBuilder.deleteInstanceSettings().build(); deleteInstanceOperationSettings = settingsBuilder.deleteInstanceOperationSettings().build(); getCertificateAuthoritySettings = settingsBuilder.getCertificateAuthoritySettings().build(); listLocationsSettings = settingsBuilder.listLocationsSettings().build(); getLocationSettings = settingsBuilder.getLocationSettings().build(); } /** Builder for MemorystoreStubSettings. 
*/ public static class Builder extends StubSettings.Builder<MemorystoreStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse> listInstancesSettings; private final UnaryCallSettings.Builder<GetInstanceRequest, Instance> getInstanceSettings; private final UnaryCallSettings.Builder<CreateInstanceRequest, Operation> createInstanceSettings; private final OperationCallSettings.Builder<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationSettings; private final UnaryCallSettings.Builder<UpdateInstanceRequest, Operation> updateInstanceSettings; private final OperationCallSettings.Builder<UpdateInstanceRequest, Instance, OperationMetadata> updateInstanceOperationSettings; private final UnaryCallSettings.Builder<DeleteInstanceRequest, Operation> deleteInstanceSettings; private final OperationCallSettings.Builder<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationSettings; private final UnaryCallSettings.Builder<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthoritySettings; private final PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); 
RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(10000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_0_params", settings); settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("no_retry_1_params", settings); settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); definitions.put("no_retry_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); listInstancesSettings = PagedCallSettings.newBuilder(LIST_INSTANCES_PAGE_STR_FACT); getInstanceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createInstanceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createInstanceOperationSettings = OperationCallSettings.newBuilder(); updateInstanceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateInstanceOperationSettings = OperationCallSettings.newBuilder(); deleteInstanceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteInstanceOperationSettings = OperationCallSettings.newBuilder(); getCertificateAuthoritySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listLocationsSettings = 
PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT); getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listInstancesSettings, getInstanceSettings, createInstanceSettings, updateInstanceSettings, deleteInstanceSettings, getCertificateAuthoritySettings, listLocationsSettings, getLocationSettings); initDefaults(this); } protected Builder(MemorystoreStubSettings settings) { super(settings); listInstancesSettings = settings.listInstancesSettings.toBuilder(); getInstanceSettings = settings.getInstanceSettings.toBuilder(); createInstanceSettings = settings.createInstanceSettings.toBuilder(); createInstanceOperationSettings = settings.createInstanceOperationSettings.toBuilder(); updateInstanceSettings = settings.updateInstanceSettings.toBuilder(); updateInstanceOperationSettings = settings.updateInstanceOperationSettings.toBuilder(); deleteInstanceSettings = settings.deleteInstanceSettings.toBuilder(); deleteInstanceOperationSettings = settings.deleteInstanceOperationSettings.toBuilder(); getCertificateAuthoritySettings = settings.getCertificateAuthoritySettings.toBuilder(); listLocationsSettings = settings.listLocationsSettings.toBuilder(); getLocationSettings = settings.getLocationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listInstancesSettings, getInstanceSettings, createInstanceSettings, updateInstanceSettings, deleteInstanceSettings, getCertificateAuthoritySettings, listLocationsSettings, getLocationSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); 
builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .listInstancesSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getInstanceSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .createInstanceSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .updateInstanceSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .deleteInstanceSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .getCertificateAuthoritySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listLocationsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params")); builder .getLocationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params")); builder .createInstanceOperationSettings() .setInitialCallSettings( UnaryCallSettings .<CreateInstanceRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Instance.class)) .setMetadataTransformer( 
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .updateInstanceOperationSettings() .setInitialCallSettings( UnaryCallSettings .<UpdateInstanceRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Instance.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .deleteInstanceOperationSettings() .setInitialCallSettings( UnaryCallSettings .<DeleteInstanceRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() 
.setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to listInstances. */ public PagedCallSettings.Builder< ListInstancesRequest, ListInstancesResponse, ListInstancesPagedResponse> listInstancesSettings() { return listInstancesSettings; } /** Returns the builder for the settings used for calls to getInstance. */ public UnaryCallSettings.Builder<GetInstanceRequest, Instance> getInstanceSettings() { return getInstanceSettings; } /** Returns the builder for the settings used for calls to createInstance. */ public UnaryCallSettings.Builder<CreateInstanceRequest, Operation> createInstanceSettings() { return createInstanceSettings; } /** Returns the builder for the settings used for calls to createInstance. */ public OperationCallSettings.Builder<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationSettings() { return createInstanceOperationSettings; } /** Returns the builder for the settings used for calls to updateInstance. 
*/ public UnaryCallSettings.Builder<UpdateInstanceRequest, Operation> updateInstanceSettings() { return updateInstanceSettings; } /** Returns the builder for the settings used for calls to updateInstance. */ public OperationCallSettings.Builder<UpdateInstanceRequest, Instance, OperationMetadata> updateInstanceOperationSettings() { return updateInstanceOperationSettings; } /** Returns the builder for the settings used for calls to deleteInstance. */ public UnaryCallSettings.Builder<DeleteInstanceRequest, Operation> deleteInstanceSettings() { return deleteInstanceSettings; } /** Returns the builder for the settings used for calls to deleteInstance. */ public OperationCallSettings.Builder<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationSettings() { return deleteInstanceOperationSettings; } /** Returns the builder for the settings used for calls to getCertificateAuthority. */ public UnaryCallSettings.Builder<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthoritySettings() { return getCertificateAuthoritySettings; } /** Returns the builder for the settings used for calls to listLocations. */ public PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the builder for the settings used for calls to getLocation. */ public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } @Override public MemorystoreStubSettings build() throws IOException { return new MemorystoreStubSettings(this); } } }
googleapis/sdk-platform-java
35,427
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/ExpandRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: schema/google/showcase/v1beta1/echo.proto
// Protobuf Java Version: 3.25.8
//
// NOTE(review): this file is machine-generated by protoc. Do not hand-edit:
// any change here is lost on the next regeneration. To alter behavior, change
// the .proto definition above and regenerate.

package com.google.showcase.v1beta1;

/**
 *
 *
 * <pre>
 * The request message for the Expand method.
 * </pre>
 *
 * Protobuf type {@code google.showcase.v1beta1.ExpandRequest}
 */
public final class ExpandRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.ExpandRequest)
    ExpandRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ExpandRequest.newBuilder() to construct.
  private ExpandRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ExpandRequest() {
    content_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExpandRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.showcase.v1beta1.EchoOuterClass
        .internal_static_google_showcase_v1beta1_ExpandRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.showcase.v1beta1.EchoOuterClass
        .internal_static_google_showcase_v1beta1_ExpandRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.showcase.v1beta1.ExpandRequest.class,
            com.google.showcase.v1beta1.ExpandRequest.Builder.class);
  }

  // Presence bits for the message fields: bit 0 = error, bit 1 = stream_wait_time.
  private int bitField0_;

  public static final int CONTENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object content_ = "";

  /**
   *
   *
   * <pre>
   * The content that will be split into words and returned on the stream.
   * </pre>
   *
   * <code>string content = 1;</code>
   *
   * @return The content.
   */
  @java.lang.Override
  public java.lang.String getContent() {
    java.lang.Object ref = content_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads avoid re-decoding.
      content_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The content that will be split into words and returned on the stream.
   * </pre>
   *
   * <code>string content = 1;</code>
   *
   * @return The bytes for content.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getContentBytes() {
    java.lang.Object ref = content_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      content_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ERROR_FIELD_NUMBER = 2;
  private com.google.rpc.Status error_;

  /**
   *
   *
   * <pre>
   * The error that is thrown after all words are sent on the stream.
   * </pre>
   *
   * <code>.google.rpc.Status error = 2;</code>
   *
   * @return Whether the error field is set.
   */
  @java.lang.Override
  public boolean hasError() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The error that is thrown after all words are sent on the stream.
   * </pre>
   *
   * <code>.google.rpc.Status error = 2;</code>
   *
   * @return The error.
   */
  @java.lang.Override
  public com.google.rpc.Status getError() {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }

  /**
   *
   *
   * <pre>
   * The error that is thrown after all words are sent on the stream.
   * </pre>
   *
   * <code>.google.rpc.Status error = 2;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }

  public static final int STREAM_WAIT_TIME_FIELD_NUMBER = 3;
  private com.google.protobuf.Duration streamWaitTime_;

  /**
   *
   *
   * <pre>
   * The wait time between each server streaming messages
   * </pre>
   *
   * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
   *
   * @return Whether the streamWaitTime field is set.
   */
  @java.lang.Override
  public boolean hasStreamWaitTime() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * The wait time between each server streaming messages
   * </pre>
   *
   * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
   *
   * @return The streamWaitTime.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getStreamWaitTime() {
    return streamWaitTime_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : streamWaitTime_;
  }

  /**
   *
   *
   * <pre>
   * The wait time between each server streaming messages
   * </pre>
   *
   * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getStreamWaitTimeOrBuilder() {
    return streamWaitTime_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : streamWaitTime_;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, content_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(3, getStreamWaitTime());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, content_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getStreamWaitTime());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.showcase.v1beta1.ExpandRequest)) {
      return super.equals(obj);
    }
    com.google.showcase.v1beta1.ExpandRequest other =
        (com.google.showcase.v1beta1.ExpandRequest) obj;

    if (!getContent().equals(other.getContent())) return false;
    if (hasError() != other.hasError()) return false;
    if (hasError()) {
      if (!getError().equals(other.getError())) return false;
    }
    if (hasStreamWaitTime() != other.hasStreamWaitTime()) return false;
    if (hasStreamWaitTime()) {
      if (!getStreamWaitTime().equals(other.getStreamWaitTime())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CONTENT_FIELD_NUMBER;
    hash = (53 * hash) + getContent().hashCode();
    if (hasError()) {
      hash = (37 * hash) + ERROR_FIELD_NUMBER;
      hash = (53 * hash) + getError().hashCode();
    }
    if (hasStreamWaitTime()) {
      hash = (37 * hash) + STREAM_WAIT_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getStreamWaitTime().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ExpandRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.showcase.v1beta1.ExpandRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The request message for the Expand method.
   * </pre>
   *
   * Protobuf type {@code google.showcase.v1beta1.ExpandRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.ExpandRequest)
      com.google.showcase.v1beta1.ExpandRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.showcase.v1beta1.EchoOuterClass
          .internal_static_google_showcase_v1beta1_ExpandRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.showcase.v1beta1.EchoOuterClass
          .internal_static_google_showcase_v1beta1_ExpandRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.showcase.v1beta1.ExpandRequest.class,
              com.google.showcase.v1beta1.ExpandRequest.Builder.class);
    }

    // Construct using com.google.showcase.v1beta1.ExpandRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getErrorFieldBuilder();
        getStreamWaitTimeFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      content_ = "";
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      streamWaitTime_ = null;
      if (streamWaitTimeBuilder_ != null) {
        streamWaitTimeBuilder_.dispose();
        streamWaitTimeBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.showcase.v1beta1.EchoOuterClass
          .internal_static_google_showcase_v1beta1_ExpandRequest_descriptor;
    }

    @java.lang.Override
    public com.google.showcase.v1beta1.ExpandRequest getDefaultInstanceForType() {
      return com.google.showcase.v1beta1.ExpandRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.showcase.v1beta1.ExpandRequest build() {
      com.google.showcase.v1beta1.ExpandRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.showcase.v1beta1.ExpandRequest buildPartial() {
      com.google.showcase.v1beta1.ExpandRequest result =
          new com.google.showcase.v1beta1.ExpandRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies fields whose builder presence bits are set into the result message.
    // Note the builder uses bits 0-2 (content/error/streamWaitTime) while the
    // message uses bits 0-1 (error/streamWaitTime only).
    private void buildPartial0(com.google.showcase.v1beta1.ExpandRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.content_ = content_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.streamWaitTime_ =
            streamWaitTimeBuilder_ == null ? streamWaitTime_ : streamWaitTimeBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.showcase.v1beta1.ExpandRequest) {
        return mergeFrom((com.google.showcase.v1beta1.ExpandRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.showcase.v1beta1.ExpandRequest other) {
      if (other == com.google.showcase.v1beta1.ExpandRequest.getDefaultInstance()) return this;
      if (!other.getContent().isEmpty()) {
        content_ = other.content_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasError()) {
        mergeError(other.getError());
      }
      if (other.hasStreamWaitTime()) {
        mergeStreamWaitTime(other.getStreamWaitTime());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                content_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getStreamWaitTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object content_ = "";

    /**
     *
     *
     * <pre>
     * The content that will be split into words and returned on the stream.
     * </pre>
     *
     * <code>string content = 1;</code>
     *
     * @return The content.
     */
    public java.lang.String getContent() {
      java.lang.Object ref = content_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        content_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The content that will be split into words and returned on the stream.
     * </pre>
     *
     * <code>string content = 1;</code>
     *
     * @return The bytes for content.
     */
    public com.google.protobuf.ByteString getContentBytes() {
      java.lang.Object ref = content_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        content_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The content that will be split into words and returned on the stream.
     * </pre>
     *
     * <code>string content = 1;</code>
     *
     * @param value The content to set.
     * @return This builder for chaining.
     */
    public Builder setContent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      content_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The content that will be split into words and returned on the stream.
     * </pre>
     *
     * <code>string content = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearContent() {
      content_ = getDefaultInstance().getContent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The content that will be split into words and returned on the stream.
     * </pre>
     *
     * <code>string content = 1;</code>
     *
     * @param value The bytes for content to set.
     * @return This builder for chaining.
     */
    public Builder setContentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      content_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.rpc.Status error_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        errorBuilder_;

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     *
     * @return Whether the error field is set.
     */
    public boolean hasError() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     *
     * @return The error.
     */
    public com.google.rpc.Status getError() {
      if (errorBuilder_ == null) {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      } else {
        return errorBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public Builder setError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        error_ = value;
      } else {
        errorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public Builder setError(com.google.rpc.Status.Builder builderForValue) {
      if (errorBuilder_ == null) {
        error_ = builderForValue.build();
      } else {
        errorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public Builder mergeError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && error_ != null
            && error_ != com.google.rpc.Status.getDefaultInstance()) {
          getErrorBuilder().mergeFrom(value);
        } else {
          error_ = value;
        }
      } else {
        errorBuilder_.mergeFrom(value);
      }
      if (error_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public Builder clearError() {
      bitField0_ = (bitField0_ & ~0x00000002);
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public com.google.rpc.Status.Builder getErrorBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getErrorFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
      if (errorBuilder_ != null) {
        return errorBuilder_.getMessageOrBuilder();
      } else {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      }
    }

    /**
     *
     *
     * <pre>
     * The error that is thrown after all words are sent on the stream.
     * </pre>
     *
     * <code>.google.rpc.Status error = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorFieldBuilder() {
      if (errorBuilder_ == null) {
        errorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.rpc.Status,
                com.google.rpc.Status.Builder,
                com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean());
        error_ = null;
      }
      return errorBuilder_;
    }

    private com.google.protobuf.Duration streamWaitTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        streamWaitTimeBuilder_;

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     *
     * @return Whether the streamWaitTime field is set.
     */
    public boolean hasStreamWaitTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     *
     * @return The streamWaitTime.
     */
    public com.google.protobuf.Duration getStreamWaitTime() {
      if (streamWaitTimeBuilder_ == null) {
        return streamWaitTime_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : streamWaitTime_;
      } else {
        return streamWaitTimeBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public Builder setStreamWaitTime(com.google.protobuf.Duration value) {
      if (streamWaitTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        streamWaitTime_ = value;
      } else {
        streamWaitTimeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public Builder setStreamWaitTime(com.google.protobuf.Duration.Builder builderForValue) {
      if (streamWaitTimeBuilder_ == null) {
        streamWaitTime_ = builderForValue.build();
      } else {
        streamWaitTimeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public Builder mergeStreamWaitTime(com.google.protobuf.Duration value) {
      if (streamWaitTimeBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && streamWaitTime_ != null
            && streamWaitTime_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getStreamWaitTimeBuilder().mergeFrom(value);
        } else {
          streamWaitTime_ = value;
        }
      } else {
        streamWaitTimeBuilder_.mergeFrom(value);
      }
      if (streamWaitTime_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public Builder clearStreamWaitTime() {
      bitField0_ = (bitField0_ & ~0x00000004);
      streamWaitTime_ = null;
      if (streamWaitTimeBuilder_ != null) {
        streamWaitTimeBuilder_.dispose();
        streamWaitTimeBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public com.google.protobuf.Duration.Builder getStreamWaitTimeBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getStreamWaitTimeFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    public com.google.protobuf.DurationOrBuilder getStreamWaitTimeOrBuilder() {
      if (streamWaitTimeBuilder_ != null) {
        return streamWaitTimeBuilder_.getMessageOrBuilder();
      } else {
        return streamWaitTime_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : streamWaitTime_;
      }
    }

    /**
     *
     *
     * <pre>
     * The wait time between each server streaming messages
     * </pre>
     *
     * <code>.google.protobuf.Duration stream_wait_time = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getStreamWaitTimeFieldBuilder() {
      if (streamWaitTimeBuilder_ == null) {
        streamWaitTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getStreamWaitTime(), getParentForChildren(), isClean());
        streamWaitTime_ = null;
      }
      return streamWaitTimeBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.ExpandRequest)
  }

  // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.ExpandRequest)
  private static final com.google.showcase.v1beta1.ExpandRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.showcase.v1beta1.ExpandRequest();
  }

  public static com.google.showcase.v1beta1.ExpandRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ExpandRequest> PARSER =
      new com.google.protobuf.AbstractParser<ExpandRequest>() {
        @java.lang.Override
        public ExpandRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ExpandRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ExpandRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.showcase.v1beta1.ExpandRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
35,443
java-geminidataanalytics/proto-google-cloud-geminidataanalytics-v1beta/src/main/java/com/google/cloud/geminidataanalytics/v1beta/ChartMessage.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/geminidataanalytics/v1beta/data_chat_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.geminidataanalytics.v1beta; /** * * * <pre> * A message produced during chart generation. * </pre> * * Protobuf type {@code google.cloud.geminidataanalytics.v1beta.ChartMessage} */ public final class ChartMessage extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.geminidataanalytics.v1beta.ChartMessage) ChartMessageOrBuilder { private static final long serialVersionUID = 0L; // Use ChartMessage.newBuilder() to construct. 
private ChartMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ChartMessage() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ChartMessage(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.geminidataanalytics.v1beta.DataChatServiceProto .internal_static_google_cloud_geminidataanalytics_v1beta_ChartMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.geminidataanalytics.v1beta.DataChatServiceProto .internal_static_google_cloud_geminidataanalytics_v1beta_ChartMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.geminidataanalytics.v1beta.ChartMessage.class, com.google.cloud.geminidataanalytics.v1beta.ChartMessage.Builder.class); } private int kindCase_ = 0; @SuppressWarnings("serial") private java.lang.Object kind_; public enum KindCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { QUERY(1), RESULT(2), KIND_NOT_SET(0); private final int value; private KindCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static KindCase valueOf(int value) { return forNumber(value); } public static KindCase forNumber(int value) { switch (value) { case 1: return QUERY; case 2: return RESULT; case 0: return KIND_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public KindCase getKindCase() { return KindCase.forNumber(kindCase_); } public static final int QUERY_FIELD_NUMBER = 1; /** * * * <pre> * A query for generating a chart. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> * * @return Whether the query field is set. */ @java.lang.Override public boolean hasQuery() { return kindCase_ == 1; } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> * * @return The query. */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartQuery getQuery() { if (kindCase_ == 1) { return (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartQueryOrBuilder getQueryOrBuilder() { if (kindCase_ == 1) { return (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } public static final int RESULT_FIELD_NUMBER = 2; /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> * * @return Whether the result field is set. */ @java.lang.Override public boolean hasResult() { return kindCase_ == 2; } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> * * @return The result. */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartResult getResult() { if (kindCase_ == 2) { return (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } /** * * * <pre> * The result of a chart generation query. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartResultOrBuilder getResultOrBuilder() { if (kindCase_ == 2) { return (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (kindCase_ == 1) { output.writeMessage(1, (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_); } if (kindCase_ == 2) { output.writeMessage(2, (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (kindCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_); } if (kindCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.geminidataanalytics.v1beta.ChartMessage)) { return super.equals(obj); } com.google.cloud.geminidataanalytics.v1beta.ChartMessage other = (com.google.cloud.geminidataanalytics.v1beta.ChartMessage) obj; if (!getKindCase().equals(other.getKindCase())) return false; switch (kindCase_) { case 1: if 
(!getQuery().equals(other.getQuery())) return false; break; case 2: if (!getResult().equals(other.getResult())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (kindCase_) { case 1: hash = (37 * hash) + QUERY_FIELD_NUMBER; hash = (53 * hash) + getQuery().hashCode(); break; case 2: hash = (37 * hash) + RESULT_FIELD_NUMBER; hash = (53 * hash) + getResult().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( byte[] 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.geminidataanalytics.v1beta.ChartMessage prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A message produced during chart generation. * </pre> * * Protobuf type {@code google.cloud.geminidataanalytics.v1beta.ChartMessage} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.geminidataanalytics.v1beta.ChartMessage) com.google.cloud.geminidataanalytics.v1beta.ChartMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.geminidataanalytics.v1beta.DataChatServiceProto .internal_static_google_cloud_geminidataanalytics_v1beta_ChartMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.geminidataanalytics.v1beta.DataChatServiceProto .internal_static_google_cloud_geminidataanalytics_v1beta_ChartMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.geminidataanalytics.v1beta.ChartMessage.class, com.google.cloud.geminidataanalytics.v1beta.ChartMessage.Builder.class); } // Construct using com.google.cloud.geminidataanalytics.v1beta.ChartMessage.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (queryBuilder_ != null) { queryBuilder_.clear(); } if (resultBuilder_ != null) { resultBuilder_.clear(); } kindCase_ = 0; kind_ = 
null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.geminidataanalytics.v1beta.DataChatServiceProto .internal_static_google_cloud_geminidataanalytics_v1beta_ChartMessage_descriptor; } @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartMessage getDefaultInstanceForType() { return com.google.cloud.geminidataanalytics.v1beta.ChartMessage.getDefaultInstance(); } @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartMessage build() { com.google.cloud.geminidataanalytics.v1beta.ChartMessage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartMessage buildPartial() { com.google.cloud.geminidataanalytics.v1beta.ChartMessage result = new com.google.cloud.geminidataanalytics.v1beta.ChartMessage(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.geminidataanalytics.v1beta.ChartMessage result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs( com.google.cloud.geminidataanalytics.v1beta.ChartMessage result) { result.kindCase_ = kindCase_; result.kind_ = this.kind_; if (kindCase_ == 1 && queryBuilder_ != null) { result.kind_ = queryBuilder_.build(); } if (kindCase_ == 2 && resultBuilder_ != null) { result.kind_ = resultBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.geminidataanalytics.v1beta.ChartMessage) { return mergeFrom((com.google.cloud.geminidataanalytics.v1beta.ChartMessage) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.geminidataanalytics.v1beta.ChartMessage other) { if (other == com.google.cloud.geminidataanalytics.v1beta.ChartMessage.getDefaultInstance()) return this; switch (other.getKindCase()) { case QUERY: { mergeQuery(other.getQuery()); break; } case RESULT: { mergeResult(other.getResult()); break; } case KIND_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getQueryFieldBuilder().getBuilder(), extensionRegistry); kindCase_ = 1; break; } // case 10 case 18: { input.readMessage(getResultFieldBuilder().getBuilder(), extensionRegistry); kindCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = 
true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int kindCase_ = 0; private java.lang.Object kind_; public KindCase getKindCase() { return KindCase.forNumber(kindCase_); } public Builder clearKind() { kindCase_ = 0; kind_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartQuery, com.google.cloud.geminidataanalytics.v1beta.ChartQuery.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartQueryOrBuilder> queryBuilder_; /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> * * @return Whether the query field is set. */ @java.lang.Override public boolean hasQuery() { return kindCase_ == 1; } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> * * @return The query. */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartQuery getQuery() { if (queryBuilder_ == null) { if (kindCase_ == 1) { return (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } else { if (kindCase_ == 1) { return queryBuilder_.getMessage(); } return com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } } /** * * * <pre> * A query for generating a chart. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ public Builder setQuery(com.google.cloud.geminidataanalytics.v1beta.ChartQuery value) { if (queryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } kind_ = value; onChanged(); } else { queryBuilder_.setMessage(value); } kindCase_ = 1; return this; } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ public Builder setQuery( com.google.cloud.geminidataanalytics.v1beta.ChartQuery.Builder builderForValue) { if (queryBuilder_ == null) { kind_ = builderForValue.build(); onChanged(); } else { queryBuilder_.setMessage(builderForValue.build()); } kindCase_ = 1; return this; } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ public Builder mergeQuery(com.google.cloud.geminidataanalytics.v1beta.ChartQuery value) { if (queryBuilder_ == null) { if (kindCase_ == 1 && kind_ != com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance()) { kind_ = com.google.cloud.geminidataanalytics.v1beta.ChartQuery.newBuilder( (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_) .mergeFrom(value) .buildPartial(); } else { kind_ = value; } onChanged(); } else { if (kindCase_ == 1) { queryBuilder_.mergeFrom(value); } else { queryBuilder_.setMessage(value); } } kindCase_ = 1; return this; } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ public Builder clearQuery() { if (queryBuilder_ == null) { if (kindCase_ == 1) { kindCase_ = 0; kind_ = null; onChanged(); } } else { if (kindCase_ == 1) { kindCase_ = 0; kind_ = null; } queryBuilder_.clear(); } return this; } /** * * * <pre> * A query for generating a chart. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ public com.google.cloud.geminidataanalytics.v1beta.ChartQuery.Builder getQueryBuilder() { return getQueryFieldBuilder().getBuilder(); } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartQueryOrBuilder getQueryOrBuilder() { if ((kindCase_ == 1) && (queryBuilder_ != null)) { return queryBuilder_.getMessageOrBuilder(); } else { if (kindCase_ == 1) { return (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } } /** * * * <pre> * A query for generating a chart. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartQuery query = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartQuery, com.google.cloud.geminidataanalytics.v1beta.ChartQuery.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartQueryOrBuilder> getQueryFieldBuilder() { if (queryBuilder_ == null) { if (!(kindCase_ == 1)) { kind_ = com.google.cloud.geminidataanalytics.v1beta.ChartQuery.getDefaultInstance(); } queryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartQuery, com.google.cloud.geminidataanalytics.v1beta.ChartQuery.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartQueryOrBuilder>( (com.google.cloud.geminidataanalytics.v1beta.ChartQuery) kind_, getParentForChildren(), isClean()); kind_ = null; } kindCase_ = 1; onChanged(); return queryBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartResult, com.google.cloud.geminidataanalytics.v1beta.ChartResult.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartResultOrBuilder> resultBuilder_; /** * * * <pre> 
* The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> * * @return Whether the result field is set. */ @java.lang.Override public boolean hasResult() { return kindCase_ == 2; } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> * * @return The result. */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartResult getResult() { if (resultBuilder_ == null) { if (kindCase_ == 2) { return (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } else { if (kindCase_ == 2) { return resultBuilder_.getMessage(); } return com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ public Builder setResult(com.google.cloud.geminidataanalytics.v1beta.ChartResult value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } kind_ = value; onChanged(); } else { resultBuilder_.setMessage(value); } kindCase_ = 2; return this; } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ public Builder setResult( com.google.cloud.geminidataanalytics.v1beta.ChartResult.Builder builderForValue) { if (resultBuilder_ == null) { kind_ = builderForValue.build(); onChanged(); } else { resultBuilder_.setMessage(builderForValue.build()); } kindCase_ = 2; return this; } /** * * * <pre> * The result of a chart generation query. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ public Builder mergeResult(com.google.cloud.geminidataanalytics.v1beta.ChartResult value) { if (resultBuilder_ == null) { if (kindCase_ == 2 && kind_ != com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance()) { kind_ = com.google.cloud.geminidataanalytics.v1beta.ChartResult.newBuilder( (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_) .mergeFrom(value) .buildPartial(); } else { kind_ = value; } onChanged(); } else { if (kindCase_ == 2) { resultBuilder_.mergeFrom(value); } else { resultBuilder_.setMessage(value); } } kindCase_ = 2; return this; } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ public Builder clearResult() { if (resultBuilder_ == null) { if (kindCase_ == 2) { kindCase_ = 0; kind_ = null; onChanged(); } } else { if (kindCase_ == 2) { kindCase_ = 0; kind_ = null; } resultBuilder_.clear(); } return this; } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ public com.google.cloud.geminidataanalytics.v1beta.ChartResult.Builder getResultBuilder() { return getResultFieldBuilder().getBuilder(); } /** * * * <pre> * The result of a chart generation query. * </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartResultOrBuilder getResultOrBuilder() { if ((kindCase_ == 2) && (resultBuilder_ != null)) { return resultBuilder_.getMessageOrBuilder(); } else { if (kindCase_ == 2) { return (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_; } return com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } } /** * * * <pre> * The result of a chart generation query. 
* </pre> * * <code>.google.cloud.geminidataanalytics.v1beta.ChartResult result = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartResult, com.google.cloud.geminidataanalytics.v1beta.ChartResult.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { if (!(kindCase_ == 2)) { kind_ = com.google.cloud.geminidataanalytics.v1beta.ChartResult.getDefaultInstance(); } resultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.geminidataanalytics.v1beta.ChartResult, com.google.cloud.geminidataanalytics.v1beta.ChartResult.Builder, com.google.cloud.geminidataanalytics.v1beta.ChartResultOrBuilder>( (com.google.cloud.geminidataanalytics.v1beta.ChartResult) kind_, getParentForChildren(), isClean()); kind_ = null; } kindCase_ = 2; onChanged(); return resultBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.geminidataanalytics.v1beta.ChartMessage) } // @@protoc_insertion_point(class_scope:google.cloud.geminidataanalytics.v1beta.ChartMessage) private static final com.google.cloud.geminidataanalytics.v1beta.ChartMessage DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.geminidataanalytics.v1beta.ChartMessage(); } public static com.google.cloud.geminidataanalytics.v1beta.ChartMessage getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ChartMessage> PARSER = new com.google.protobuf.AbstractParser<ChartMessage>() { @java.lang.Override public ChartMessage parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ChartMessage> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ChartMessage> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.geminidataanalytics.v1beta.ChartMessage getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
35,807
truffle/src/com.oracle.truffle.api.instrumentation/src/com/oracle/truffle/api/instrumentation/InstrumentableNode.java
/* * Copyright (c) 2018, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.oracle.truffle.api.instrumentation;

import java.util.Set;

import com.oracle.truffle.api.CallTarget;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.TruffleLanguage;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.instrumentation.InstrumentableNode.WrapperNode;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.Node.Child;
import com.oracle.truffle.api.nodes.NodeInterface;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.source.SourceSection;

/**
 * Interface implemented by AST {@link Node nodes} that may be <em>instrumentable</em>: an AST
 * location where {@linkplain com.oracle.truffle.api.instrumentation.TruffleInstrument Truffle
 * instruments} are permitted to listen to before and after using execution event listeners.
 * <p>
 * Whether a node is instrumentable depends on the return value of {@link #isInstrumentable()}. All
 * instrumentable nodes must also extend {@link Node node}. All other member methods of this
 * interface are only allowed to be invoked if {@link #isInstrumentable()} returns <code>true</code>
 * .
 * <p>
 * Every instrumentable node is required to create a wrapper for this instrumentable node in
 * {@link #createWrapper(ProbeNode)}. The instrumentation framework will, when needed during
 * execution, {@link Node#replace(Node) replace} the instrumentable node with a {@link WrapperNode
 * wrapper} and delegate to the original node. After the replacement of an instrumentable node with
 * a wrapper we refer to the original node as an instrumented node.
 * <p>
 * Wrappers can be generated automatically using an annotation processor by annotating the class
 * with @{@link GenerateWrapper}. If an instrumentable node subclass has additional declared methods
 * than its instrumentable base class that are used by other nodes, then a new wrapper should be
 * generated or implemented for the subclass, otherwise the replacement of the wrapper will fail.
 * <p>
 * Instrumentable nodes may return <code>true</code> to indicate that they were tagged by {@link Tag
 * tag}. Tags are used by guest languages to indicate that a {@link Node node} is a member of a
 * certain category of nodes. For example a debugger
 * {@link com.oracle.truffle.api.instrumentation.TruffleInstrument instrument} might require a guest
 * language to tag all nodes as {@link StandardTags.StatementTag statements} that should be
 * considered as such. See {@link #hasTag(Class)} for further details on how to use tags.
 * <p>
 * <b>Example minimal implementation of an instrumentable node:</b>
 *
 * {@snippet file = "com/oracle/truffle/api/instrumentation/InstrumentableNode.java" region =
 * "com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.SimpleNode"}
 *
 * <p>
 * <b>Example for a typical implementation of an instrumentable node with support for source
 * sections:</b>
 *
 * {@snippet file = "com/oracle/truffle/api/instrumentation/InstrumentableNode.java" region =
 * "com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.RecommendedNode"}
 *
 * <p>
 *
 * @see #isInstrumentable() to decide whether node is instrumentable.
 * @see #hasTag(Class) Implement hasTag to decide whether an instrumentable node is tagged with a
 *      tag.
 * @see GenerateWrapper Use an annotation processor to generate the wrapper class.
 * @see Instrumenter#attachExecutionEventListener(SourceSectionFilter, ExecutionEventListener)
 * @since 0.33
 */
public interface InstrumentableNode extends NodeInterface {

    /**
     * Returns <code>true</code> if this node is instrumentable. Instrumentable nodes are points
     * where instrumenters can attach execution events. The return values of instrumentable nodes
     * must always be interop capable values.
     * <p>
     * The implementation of this method must ensure that its result is stable after the parent
     * {@link RootNode root node} was wrapped in a {@link CallTarget} using
     * {@link RootNode#getCallTarget()}. The result is stable if the result of calling this method
     * remains always the same.
     * <p>
     * This method might be called in parallel from multiple threads even if the language is single
     * threaded. The method may be invoked without a language context currently being active.
     *
     * @since 0.33
     */
    boolean isInstrumentable();

    /**
     * Returns a new, never adopted, unshared {@link WrapperNode wrapper} node implementation for
     * this {@link InstrumentableNode instrumentable} node. The returned wrapper implementation must
     * extend the same type that implements {@link InstrumentableNode}.
     * <p>
     * The instrumentation framework will, when needed during execution, {@link Node#replace(Node)
     * replace} the instrumentable node with a {@link WrapperNode wrapper} and delegate to the
     * original node. After the replacement of an instrumentable node with a wrapper we refer to the
     * original node as an instrumented node. Wrappers can be generated automatically using an
     * annotation processor by annotating the class with @{@link GenerateWrapper}. Please note that
     * if an instrumentable node subclass has additional execute methods then a new wrapper must be
     * generated or implemented. Otherwise the {@link Node#replace(Node) replacement} of the
     * instrumentable node with the wrapper will fail if the subtype is used as static type in nodes
     * {@link Child children}.
     * <p>
     * A wrapper forwards the following events concerning the delegate to the given {@link ProbeNode
     * probe} for propagation through the instrumentation framework, e.g. to
     * {@linkplain ExecutionEventListener event listeners} bound to this guest language program
     * location:
     * <ul>
     * <li>{@linkplain ProbeNode#onEnter(com.oracle.truffle.api.frame.VirtualFrame) onEnter(Frame)}:
     * an <em>execute</em> method on the delegate is ready to be called;</li>
     * <li>{@linkplain ProbeNode#onReturnValue(com.oracle.truffle.api.frame.VirtualFrame, Object)
     * onReturnValue(Frame,Object)}: an <em>execute</em> method on the delegate has just returned a
     * (possibly <code>null</code>) value;</li>
     * <li>{@linkplain ProbeNode#onReturnExceptionalOrUnwind(VirtualFrame, Throwable, boolean)
     * onReturnExceptionalOrUnwind(Frame,Throwable, boolean)}: an <em>execute</em> method on the
     * delegate has just thrown an exception.</li>
     * </ul>
     * <p>
     * This method is always invoked on an interpreter thread. The method may be invoked without a
     * language context currently being active.
     * <p>
     * If {@link #findProbe()} is overridden and never returns a <code>null</code> value, then
     * {@link #createWrapper(ProbeNode)} does not need to be implemented and may throw an
     * {@link UnsupportedOperationException} instead.
     *
     * @param probe the {@link ProbeNode probe node} to be adopted and sent execution events by the
     *            wrapper
     * @return a {@link WrapperNode wrapper} implementation
     * @since 0.33
     */
    WrapperNode createWrapper(ProbeNode probe);

    /**
     * Determines how to find a probe given an instrumentable node. Implementing this method allows
     * to customize probe storage, e.g. if a different strategy should be used other than the
     * default wrapper node strategy. The default implementation discovers the probe through the
     * parent wrapper node by calling {@link WrapperNode#getProbeNode()}. A probe can be initialized
     * lazily on {@link #findProbe()} calls using {@link #createProbe(SourceSection)}. This method
     * will never be invoked if {@link #isInstrumentable()} returns <code>false</code>.
     * <p>
     * If this method returns <code>null</code> then the default wrapper node strategy will be
     * applied for this instrumentable node. A custom probe storage strategy must therefore ensure
     * that this method never returns <code>null</code>.
     * <p>
     * The probe must be stored/read from a reference with volatile semantics. This method must
     * produce a {@link CompilerDirectives#isPartialEvaluationConstant(Object) partial evaluation
     * constant} if the receiver is a PE constant.
     *
     * @see #createProbe(SourceSection)
     * @since 24.2
     */
    default ProbeNode findProbe() {
        // Default wrapper-node strategy: the probe, if any, is stored on the parent wrapper.
        Node parent = ((Node) this).getParent();
        if (parent instanceof WrapperNode w) {
            return w.getProbeNode();
        }
        return null;
    }

    /**
     * Returns <code>true</code> if this node should be considered tagged by a given tag else
     * <code>false</code>. In order for a Truffle language to support a particular tag, the tag must
     * also be marked as {@link ProvidedTags provided} by the language.
     * <p>
     * Tags are used by guest languages to indicate that a {@link Node node} is a member of a
     * certain category of nodes. For example a debugger {@link TruffleInstrument instrument} might
     * require a guest language to tag all nodes as statements that should be considered as such.
     * <p>
     * The node implementor may decide how to implement tagging for nodes. The simplest way to
     * implement tagging using Java types is by overriding the {@link #hasTag(Class)} method. This
     * example shows how to tag a node subclass and all its subclasses as statement:
     *
     * {@snippet file = "com/oracle/truffle/api/instrumentation/InstrumentableNode.java" region =
     * "com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.StatementNode"}
     *
     * <p>
     * Often it is impossible to just rely on the node's Java type to implement tagging. This
     * example shows how to use local state to implement tagging for a node.
     *
     * {@snippet file = "com/oracle/truffle/api/instrumentation/InstrumentableNode.java" region =
     * "com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.HaltNode"}
     *
     * <p>
     * The implementation of hasTag method must ensure that its result is stable after the parent
     * {@link RootNode root node} was wrapped in a {@link CallTarget} using
     * {@link RootNode#getCallTarget()}. The result is stable if the result of calling this method
     * for a particular tag remains always the same.
     * <p>
     * This method might be called in parallel from multiple threads even if the language is single
     * threaded. The method may be invoked without a language context currently being active.
     *
     * @param tag the class {@link com.oracle.truffle.api.instrumentation.ProvidedTags provided} by
     *            the {@link TruffleLanguage language}
     * @return <code>true</code> if the node should be considered tagged by a tag else
     *         <code>false</code>.
     * @since 0.33
     */
    default boolean hasTag(Class<? extends Tag> tag) {
        // Conservative default: untagged unless the implementor opts in.
        return false;
    }

    /**
     * Returns an interop capable object that contains all keys and values of attributes associated
     * with this node. The returned object must return <code>true</code> in response to the
     * {@link com.oracle.truffle.api.interop.InteropLibrary#hasMembers(Object) has members} message.
     * If <code>null</code> is returned then an empty tag object without any readable keys will be
     * assumed. Multiple calls to {@link #getNodeObject()} for a particular node may return the same
     * or objects with different identity. The returned object must not support any write operation.
     * The returned object must not support execution or instantiation and must not have a size.
     * <p>
     * For performance reasons it is not recommended to eagerly collect all properties of the node
     * object when {@link #getNodeObject()} is invoked. Instead, the language should lazily compute
     * them when they are read. If the node object contains dynamic properties, that change during
     * the execution of the AST, then the node must return an updated value for each key when it is
     * read repeatedly. In other words the node object must always represent the current state of
     * this AST {@link Node node}. The implementer should not cache the node instance in the AST.
     * The instrumentation framework will take care of caching node object instances when they are
     * requested by tools.
     * <p>
     * <b>Compatibility:</b> In addition to the expected keys by the tag specification, the language
     * implementation may provide any set of additional keys and values. Tools might depend on these
     * language specific tags and might break if keys or values are changed without notice.
     * <p>
     * For a memory efficient implementation the language might make the instrumentable {@link Node}
     * a TruffleObject and return this instance.
     * <p>
     * This method might be called in parallel from multiple threads even if the language is single
     * threaded. The method may be invoked without a language context currently being active. The
     * {@link Node#getLock() AST lock} is held while {@link #getNodeObject()} object is invoked.
     * There is no lock held when the object is read.
     *
     * @return the node object as TruffleObject or <code>null</code> if no node object properties
     *         are available for this instrumented node
     * @since 0.33
     */
    default Object getNodeObject() {
        // null means "no attributes": the framework substitutes an empty members object.
        return null;
    }

    /**
     * Removes optimizations performed in this AST node to restore the syntactic AST structure.
     * Guest languages may decide to group multiple nodes together into a single node. This is
     * useful to reduce the memory consumed by the AST representation and it can also improve the
     * execution performance when interpreting the AST. Performing such optimizations often modify
     * the syntactic AST structure, leading to invalid execution events reported to the
     * instrumentation framework. Implementing this method allows the instrumented node to restore
     * the syntactic AST structure when needed. It provides a list of tags that were requested by
     * all current execution event bindings to allow the language to do the materialization
     * selectively for instrumentable nodes with certain tags only.
     * <p>
     * The returned instrumentable nodes must return themselves when this method is called on them
     * with the same tags. Materialized nodes should not be re-materialized again. Instrumentation
     * relies on the stability of materialized nodes. Use {@link Node#notifyInserted(Node)} when you
     * need to change the structure of instrumentable nodes.
     * <p>
     * Node must return itself from this method when it has already seen all the materializedTags
     * specified as an argument, i.e., not only if the set of tags is exactly the same as before,
     * but also if the current set of tags is completely contained in the union of all the sets of
     * tags specified in all the calls of this method that led to creation of this materialized
     * node.
     * <p>
     * If the node returns a new node from this method, the subtree rooted at the new node must be
     * completely unadopted, i.e., all nodes it contains must not have existed in the original AST.
     * Also, the new subtree must be completely materialized, so that no new materializations occur
     * when the instrumentation framework instruments the new subtree during the current traversal.
     * <p>
     * The AST lock is acquired while this method is invoked. Therefore it is not allowed to run
     * guest language code while this method is invoked. This method might be called in parallel
     * from multiple threads even if the language is single threaded. The method may be invoked
     * without a language context currently being active. Language reference is always available.
     * <p>
     * In the example below, we show how the <code>IncrementNode</code> with a
     * <code>ConstantNode</code> child is optimized into a <code>ConstantIncrementNode</code> and
     * how it can implement <code>materializeSyntaxNodes</code> to restore the syntactic structure
     * of the AST:
     * <p>
     * {@snippet file = "com/oracle/truffle/api/instrumentation/InstrumentableNode.java" region =
     * "com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.ExpressionNode"}
     *
     * @param materializedTags a set of tags that requested to be materialized
     * @since 0.33
     */
    default InstrumentableNode materializeInstrumentableNodes(Set<Class<? extends Tag>> materializedTags) {
        // Default: no node grouping was performed, so nothing needs to be restored.
        return this;
    }

    /**
     * Find the nearest {@link Node node} to the given source character index according to the guest
     * language control flow, that is tagged with some of the given tags. The source character index
     * is in this node's source. The nearest node will preferably be in the same block/function as
     * the character index. This node acts as a context node - either a node containing the
     * character index if such node exists, or node following the character index if exists, or node
     * preceding the character index otherwise.
     * <p>
     * Return an instrumentable node that is tagged with some of the tags and containing the
     * character index, if such exists and there is not a more suitable sibling node inside the
     * container source section. Return the next sibling tagged node otherwise, or the previous one
     * when the next one does not exist.
     * <p>
     * <u>Use Case</u><br>
     * The current use-case of this method is a relocation of breakpoint position, for instance.
     * When a user submits a breakpoint at the source character index, a nearest logical
     * instrumentable node that has suitable tags needs to be found to move the breakpoint
     * accordingly.
     * <p>
     * <u>Default Implementation</u><br>
     * This method has a default implementation, which assumes that the materialized Truffle
     * {@link Node} hierarchy corresponds with the logical guest language AST structure. If this is
     * not the case for a particular guest language, this method needs to be implemented, possibly
     * with the help of language specific AST node classes.
     * <p>
     * The default algorithm is following:<br>
     * <ol>
     * <li>If the character index is smaller than the start index of this node's source section,
     * return the first tagged child of this node.</li>
     * <li>If the character index is larger than the end index of this node's source section, return
     * the last tagged child of this node.</li>
     * <li>Otherwise, this node's source section contains the character index. Use following steps
     * to find the nearest tagged node in this node's hierarchy:
     * <ol type="a">
     * <li>Traverse the node children in declaration order (AST breadth-first order). For every
     * child do:
     * <ol>
     * <li>When the child is not instrumentable, include its children into the traversal.</li>
     * <li>When the child does not have a source section assigned, ignore it.</li>
     * <li>When the <code>sourceCharIndex</code> is inside the child's source section, find if it's
     * tagged with one of the tags (store as <code>isTagged</code>) and repeat recursively from
     * <b>3.a.</b> using this child as the node.</li>
     * <li>When the child is above the character index, remember a sorted list of such children up
     * to the lowest tagged child (store in <code>higherNodes</code> list).</li>
     * <li>When the child is below the character index, remember a sorted list of such children down
     * to the highest tagged child (store in <code>lowerNodes</code> list).</li>
     * </ol>
     * </li>
     * <li>If a tagged child node was found in <b>3.a</b> with source section matching the
     * <code>sourceCharIndex</code>, return it.</li>
     * <li>Otherwise, we check the list of lower/higher nodes:
     * <ol>
     * <li>Prefer the node after the character index.</li>
     * <li>Traverse <code>higherNodes</code> in ascending order. When the node is tagged, return it,
     * when not, repeat with that node from <b>3.a.</b></li>
     * <li>If no tagged node was found, traverse <code>lowerNodes</code> in descending order. When
     * the node is tagged, return it, when not, repeat with that node from <b>3.a.</b></li>
     * <li>When nothing was found in the steps above, return <code>null</code>.</li>
     * </ol>
     * </li>
     * <li>If <b>c.</b> didn't provide a tagged node, apply this algorithm recursively to a parent
     * of this node, if exists. If you encounter the nearest tagged parent node found in <b>3.a</b>,
     * return it. Otherwise, return a tagged child found in the steps above, if any.</li>
     * </ol>
     * </li>
     * </ol>
     *
     * @param sourceCharIndex the 0-based character index in this node's source, to find the nearest
     *            tagged node from
     * @param tags a set of tags, the nearest node needs to be tagged with at least one tag from
     *            this set
     * @return the nearest instrumentable node according to the execution flow and tagged with some
     *         of the tags, or <code>null</code> when none was found
     * @see #findNearestNodeAt(int, int, Set)
     * @since 0.33
     */
    default Node findNearestNodeAt(int sourceCharIndex, Set<Class<? extends Tag>> tags) {
        return DefaultNearestNodeSearch.findNearestNodeAt(sourceCharIndex, (Node) this, tags);
    }

    /**
     * Find the nearest {@link Node node} to the given source line and column position, according to
     * the guest language control flow, that is tagged with some of the given tags.
     * <p>
     * Behaves in the same way as {@link #findNearestNodeAt(int, Set)} but uses line/column as the
     * position specification instead of a character index.
     *
     * @param line 1-based line number
     * @param column 1-based column number, or less than one when the column is unknown
     * @param tags a set of tags, the nearest node needs to be tagged with at least one tag from
     *            this set
     * @return the nearest instrumentable node according to the execution flow and tagged with some
     *         of the tags, or <code>null</code> when none was found
     * @see #findNearestNodeAt(int, Set)
     * @since 23.0
     */
    default Node findNearestNodeAt(int line, int column, Set<Class<? extends Tag>> tags) {
        if (line < 1) {
            throw new IllegalArgumentException("A 1-based line needs to be specified, was " + line);
        }
        return DefaultNearestNodeSearch.findNearestNodeAt(line, column, (Node) this, tags);
    }

    /**
     * Find the first {@link #isInstrumentable() instrumentable} node on it's parent chain. If the
     * provided node is instrumentable itself, it is returned. If not, the first parent node that is
     * instrumentable is returned, if any.
     *
     * @param node a Node
     * @return the first instrumentable node, or <code>null</code> when no instrumentable parent
     *         exists.
     * @since 20.3
     */
    static Node findInstrumentableParent(Node node) {
        Node inode = node;
        // Walk up past wrapper nodes and nodes that are not (or not currently) instrumentable.
        while (inode != null && (inode instanceof WrapperNode || !(inode instanceof InstrumentableNode && ((InstrumentableNode) inode).isInstrumentable()))) {
            inode = inode.getParent();
        }
        assert inode == null || inode instanceof InstrumentableNode && ((InstrumentableNode) inode).isInstrumentable() : inode;
        assert !(inode instanceof WrapperNode) : inode;
        return inode;
    }

    /**
     * Method allows to create an eager probe node given an instrumentable node. This is useful to
     * implement custom probe storage by implementing {@link #findProbe()}.
     *
     * @param sourceSection the eager materialized source section for this probe.
     * @since 24.2
     */
    default ProbeNode createProbe(SourceSection sourceSection) {
        return new ProbeNode(this, sourceSection);
    }

    /**
     * Nodes that the instrumentation framework inserts into guest language ASTs (between
     * {@link InstrumentableNode instrumentable} guest language nodes and their parents) for the
     * purpose of interposing on execution events and reporting them via the instrumentation
     * framework.
     *
     * @see #createWrapper(ProbeNode)
     * @since 0.33
     */
    public interface WrapperNode extends NodeInterface {

        /**
         * The {@link InstrumentableNode instrumentable} guest language node, adopted as a child,
         * whose execution events the wrapper reports to the instrumentation framework.
         * <p>
         * This method might be called in parallel from multiple threads. The method may be invoked
         * without a language context currently being active.
         *
         * @since 0.33
         */
        Node getDelegateNode();

        /**
         * A child of the wrapper, through which the wrapper reports execution events related to the
         * guest language <em>delegate</em> node.
         * <p>
         * This method might be called in parallel from multiple threads. The method may be invoked
         * without a language context currently being active.
         *
         * @since 0.33
         */
        ProbeNode getProbeNode();
    }
}

// @formatter:off
// @replace regex='.*' replacement=''
// Snippet classes referenced by the {@snippet ...} regions in the javadoc above; they are
// stripped from the published API by the @replace directive and exist only for documentation.
class InstrumentableNodeSnippets {

    static class SimpleNodeWrapper implements WrapperNode {

        @SuppressWarnings("unused")
        SimpleNodeWrapper(SimpleNode delegate, ProbeNode probe) {
        }

        public Node getDelegateNode() {
            return null;
        }

        public ProbeNode getProbeNode() {
            return null;
        }
    }

    // @start region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.SimpleNode"
    @GenerateWrapper
    abstract static class SimpleNode extends Node implements InstrumentableNode {

        public abstract Object execute(VirtualFrame frame);

        public boolean isInstrumentable() {
            return true;
        }

        public WrapperNode createWrapper(ProbeNode probe) {
            // ASTNodeWrapper is generated by @GenerateWrapper
            return new SimpleNodeWrapper(this, probe);
        }
    }
    // @end region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.SimpleNode"

    static class RecommendedNodeWrapper implements WrapperNode {

        @SuppressWarnings("unused")
        RecommendedNodeWrapper(RecommendedNode delegate, ProbeNode probe) {
        }

        public Node getDelegateNode() {
            return null;
        }

        public ProbeNode getProbeNode() {
            return null;
        }
    }

    // @start region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.RecommendedNode"
    @GenerateWrapper
    abstract static class RecommendedNode extends Node implements InstrumentableNode {

        private static final int NO_SOURCE = -1;

        private int sourceCharIndex = NO_SOURCE;
        private int sourceLength;

        public abstract Object execute(VirtualFrame frame);

        // invoked by the parser to set the source
        void setSourceSection(int charIndex, int length) {
            assert sourceCharIndex == NO_SOURCE : "source should only be set once";
            this.sourceCharIndex = charIndex;
            this.sourceLength = length;
        }

        public final boolean isInstrumentable() {
            // all AST nodes with source are instrumentable
            return sourceCharIndex != NO_SOURCE;
        }

        @Override
        @TruffleBoundary
        public final SourceSection getSourceSection() {
            if (sourceCharIndex == NO_SOURCE) {
                // AST node without source
                return null;
            }
            RootNode rootNode = getRootNode();
            if (rootNode == null) {
                // not adopted yet
                return null;
            }
            Source source = rootNode.getSourceSection().getSource();
            return source.createSection(sourceCharIndex, sourceLength);
        }

        public WrapperNode createWrapper(ProbeNode probe) {
            // ASTNodeWrapper is generated by @GenerateWrapper
            return new RecommendedNodeWrapper(this, probe);
        }
    }
    // @end region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.RecommendedNode"

    abstract static class StatementNodeWrapper implements WrapperNode {

        @SuppressWarnings("unused")
        static StatementNodeWrapper create(StatementNode statementNode, ProbeNode probe) {
            return null;
        }
    }

    // @start region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.StatementNode"
    @GenerateWrapper
    abstract static class StatementNode extends SimpleNode implements InstrumentableNode {

        @Override
        public final Object execute(VirtualFrame frame) {
            executeVoid(frame);
            return null;
        }

        public abstract void executeVoid(VirtualFrame frame);

        @Override
        public final WrapperNode createWrapper(ProbeNode probe) {
            return StatementNodeWrapper.create(this, probe);
        }

        public boolean hasTag(Class<? extends Tag> tag) {
            if (tag == StandardTags.StatementTag.class) {
                return true;
            }
            return false;
        }
    }
    // @end region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.StatementNode"

    private static final class Debugger {
        static class HaltTag extends Tag {
        }
    }

    @SuppressWarnings("unused")
    static class HaltNodeWrapper implements WrapperNode {

        HaltNodeWrapper(Node node, ProbeNode probe) {
        }

        public Node getDelegateNode() {
            return null;
        }

        public ProbeNode getProbeNode() {
            return null;
        }
    }

    @SuppressWarnings("unused")
    // @start region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.HaltNode"
    @GenerateWrapper
    static class HaltNode extends Node implements InstrumentableNode {

        private boolean isDebuggerHalt;

        public void setDebuggerHalt(boolean isDebuggerHalt) {
            this.isDebuggerHalt = isDebuggerHalt;
        }

        public Object execute(VirtualFrame frame) {
            // does nothing;
            return null;
        }

        public boolean isInstrumentable() {
            return true;
        }

        public boolean hasTag(Class<? extends Tag> tag) {
            if (tag == Debugger.HaltTag.class) {
                return isDebuggerHalt;
            }
            return false;
        }

        public WrapperNode createWrapper(ProbeNode probe) {
            return new HaltNodeWrapper(this, probe);
        }
    }
    // @end region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.HaltNode"

    @SuppressWarnings("unused")
    static class ExpressionNodeWrapper implements WrapperNode {

        ExpressionNodeWrapper(Node node, ProbeNode probe) {
        }

        public Node getDelegateNode() {
            return null;
        }

        public ProbeNode getProbeNode() {
            return null;
        }
    }

    // @start region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.ExpressionNode"
    @GenerateWrapper
    abstract static class ExpressionNode extends Node implements InstrumentableNode {

        abstract int execute(VirtualFrame frame);

        public boolean isInstrumentable() {
            return true;
        }

        public boolean hasTag(Class<? extends Tag> tag) {
            return tag == StandardTags.ExpressionTag.class;
        }

        public WrapperNode createWrapper(ProbeNode probe) {
            return new ExpressionNodeWrapper(this, probe);
        }
    }

    class ConstantNode extends ExpressionNode {

        private final int constant;

        ConstantNode(int constant) {
            this.constant = constant;
        }

        @Override
        int execute(VirtualFrame frame) {
            return constant;
        }
    }

    // node with constant folded operation
    class ConstantIncrementNode extends ExpressionNode {
        final int constantIncremented;

        ConstantIncrementNode(int constant) {
            this.constantIncremented = constant + 1;
        }

        // desugar to restore syntactic structure of the AST
        public InstrumentableNode materializeInstrumentableNodes(
                        Set<Class<? extends Tag>> tags) {
            if (tags.contains(StandardTags.ExpressionTag.class)) {
                return new IncrementNode(
                                new ConstantNode(constantIncremented - 1));
            }
            return this;
        }

        @Override
        int execute(VirtualFrame frame) {
            return constantIncremented;
        }
    }

    // node with full semantics of the node.
    class IncrementNode extends ExpressionNode {

        @Child ExpressionNode child;

        IncrementNode(ExpressionNode child) {
            this.child = child;
        }

        @Override
        int execute(VirtualFrame frame) {
            return child.execute(frame) + 1;
        }
    }
    // @end region="com.oracle.truffle.api.instrumentation.InstrumentableNodeSnippets.ExpressionNode"
}
apache/pinot
35,828
pinot-controller/src/test/java/org/apache/pinot/controller/helix/core/realtime/segment/SizeBasedSegmentFlushThresholdComputerTest.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.controller.helix.core.realtime.segment;

import java.time.Clock;
import java.time.ZoneId;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.pinot.common.metadata.segment.SegmentZKMetadata;
import org.apache.pinot.spi.stream.StreamConfig;
import org.testng.annotations.Test;

import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.pinot.common.protocols.SegmentCompletionProtocol.REASON_FORCE_COMMIT_MESSAGE_RECEIVED;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;


/**
 * Unit tests for {@code SizeBasedSegmentFlushThresholdComputer}: verifies how the row-count flush
 * threshold for a new consuming segment is derived from the previous segment's size, the configured
 * segment-size/time thresholds, and the reason the previous segment committed.
 */
public class SizeBasedSegmentFlushThresholdComputerTest {

  // Very first segment of a partition: no history exists, so the stream config's
  // autotune initial row count is used verbatim.
  @Test
  public void testUseAutoTuneInitialRowsIfFirstSegmentInPartition() {
    int autoTuneInitialRows = 1_000;
    SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer();

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushAutotuneInitialRows()).thenReturn(autoTuneInitialRows);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    assertEquals(threshold, autoTuneInitialRows);
  }

  // First segment of a NEW partition group while history exists: threshold is derived from the
  // configured desired segment size times the learned rows-to-size ratio.
  @Test
  public void testUseLastSegmentSizeTimesRatioIfFirstSegmentInPartitionAndNewPartitionGroup() {
    long segmentSizeBytes = 20000L;
    double segmentRowsToSizeRatio = 1.5;
    SizeBasedSegmentFlushThresholdComputer computer =
        new SizeBasedSegmentFlushThresholdComputer(Clock.systemUTC());
    computer.setSizeForLastSegment(segmentSizeBytes);
    computer.setSegmentRowsToSizeRatio(segmentRowsToSizeRatio);

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(segmentSizeBytes);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    // segmentSize * 1.5
    // 20000 * 1.5
    assertEquals(threshold, 30000);
  }

  // Same as above but with a small size: the computed threshold is clamped to a floor of
  // 10,000 rows so tiny segments are never produced.
  @Test
  public void testUseLastSegmentSizeTimesRatioIfFirstSegmentInPartitionAndNewPartitionGroupMinimumSize10000Rows() {
    long segmentSizeBytes = 2000L;
    double segmentRowsToSizeRatio = 1.5;
    SizeBasedSegmentFlushThresholdComputer computer =
        new SizeBasedSegmentFlushThresholdComputer(Clock.systemUTC());
    computer.setSizeForLastSegment(segmentSizeBytes);
    computer.setSegmentRowsToSizeRatio(segmentRowsToSizeRatio);

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(segmentSizeBytes);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    assertEquals(threshold, 10000);
  }

  // Committing segment reported a size of 0 (size unknown): fall back to the threshold the
  // committing segment itself was using.
  @Test
  public void testUseLastSegmentsThresholdIfSegmentSizeMissing() {
    long segmentSizeBytes = 0L;
    int segmentSizeThreshold = 5_000;
    SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer();

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushThresholdTimeMillis()).thenReturn(123L);

    CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class);
    when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(segmentSizeBytes);

    SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class);
    when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(segmentSizeThreshold);

    computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    assertEquals(threshold, segmentSizeThreshold);
  }

  // A force-commit does not reflect organic segment growth, so the committing segment's stats
  // must not influence learning: reuse its threshold unchanged.
  @Test
  public void testUseLastSegmentsThresholdIfSegmentIsCommittingDueToForceCommit() {
    long committingSegmentSizeBytes = 500_000L;
    int committingSegmentSizeThreshold = 25_000;
    SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer();

    CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class);
    when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(committingSegmentSizeBytes);
    when(committingSegmentDescriptor.getStopReason()).thenReturn(REASON_FORCE_COMMIT_MESSAGE_RECEIVED);

    SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class);
    when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(committingSegmentSizeThreshold);

    StreamConfig streamConfig = mock(StreamConfig.class);

    computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    assertEquals(threshold, committingSegmentSizeThreshold);
  }

  // Segment committed on row count (1h elapsed < 6h time threshold): next threshold is the
  // committed doc count scaled up by the 10% growth multiplier.
  @Test
  public void testApplyMultiplierToTotalDocsWhenTimeThresholdNotReached() {
    long currentTime = 1640216032391L;
    // Fixed clock makes the elapsed-time comparison deterministic.
    Clock clock = Clock.fixed(java.time.Instant.ofEpochMilli(currentTime), ZoneId.of("UTC"));
    SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(clock);

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L);
    when(streamConfig.getFlushThresholdTimeMillis()).thenReturn(MILLISECONDS.convert(6, TimeUnit.HOURS));

    CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class);
    when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_0000L);

    SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class);
    when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(10_000L);
    when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000);
    when(committingSegmentZKMetadata.getCreationTime()).thenReturn(
        currentTime - MILLISECONDS.convert(1, TimeUnit.HOURS));

    computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    // totalDocs * 1.1
    // 10000 * 1.1
    assertEquals(threshold, 11_000);
  }

  // Segment committed on time (2h elapsed > 1h time threshold): doc count is first adjusted for
  // the overshoot (halved here) before the 10% growth multiplier is applied.
  @Test
  public void testApplyMultiplierToAdjustedTotalDocsWhenTimeThresholdIsReached() {
    long currentTime = 1640216032391L;
    Clock clock = Clock.fixed(java.time.Instant.ofEpochMilli(currentTime), ZoneId.of("UTC"));
    SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(clock);

    StreamConfig streamConfig = mock(StreamConfig.class);
    when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L);
    when(streamConfig.getFlushThresholdTimeMillis()).thenReturn(MILLISECONDS.convert(1, TimeUnit.HOURS));

    CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class);
    when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_0000L);

    SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class);
    when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L);
    when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(60_000);
    when(committingSegmentZKMetadata.getCreationTime()).thenReturn(
        currentTime - MILLISECONDS.convert(2, TimeUnit.HOURS));

    computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata);

    int threshold = computer.computeThreshold(streamConfig, "newSegmentName");
    // (totalDocs / 2) * 1.1
    // (30000 / 2) * 1.1
    // 15000 * 1.1
    assertEquals(threshold, 16_500);
  }
@Test public void testSegmentSizeTooSmall() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(500_0000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // totalDocs / 2 // 30000 / 2 assertEquals(threshold, 15_000); } @Test public void testSegmentSizeTooBig() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(500_0000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_0000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // totalDocs + (totalDocs / 2) // 30000 + (30000 / 2) assertEquals(threshold, 45_000); } @Test public void testSegmentSizeJustRight() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); 
StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(250_0000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // (totalDocs / segmentSize) * flushThresholdSegmentSize // (30000 / 250000) * 300000 assertEquals(threshold, 36_000); } @Test public void testNoRows() { int autoTuneInitialRows = 1_000; SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L); when(streamConfig.getFlushAutotuneInitialRows()).thenReturn(autoTuneInitialRows); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(250_0000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(0L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(0); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use initial rows assertEquals(threshold, autoTuneInitialRows); } @Test public void testAdjustRowsToSizeRatio() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig 
streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_0000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L, 50_000L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(60_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); // (totalDocs / segmentSize) // (30000 / 200000) assertEquals(computer.getSegmentRowsToSizeRatio(), 0.15); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); // (0.1 * (totalDocs / segmentSize)) + (0.9 * lastRatio) // (0.1 * (50000 / 200000)) + (0.9 * 0.15) // (0.1 * 0.25) + (0.9 * 0.15) assertEquals(computer.getSegmentRowsToSizeRatio(), 0.16); } @Test(invocationCount = 1000) public void testSegmentFlushThresholdVariance() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); int threshold = 90000; for (double var = 0; var <= 0.5; var += 0.05) { StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(200_0000L); when(streamConfig.getStreamConfigsMap()).thenReturn( Map.of("realtime.segment.flush.threshold.variance.percentage", String.valueOf(var))); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(300_000L); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(60_000L, 50_000L); when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(60_000); 
computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int computedThreshold = computer.computeThreshold(streamConfig, "newSegmentName"); assertTrue(computedThreshold >= (1.0 - var) * threshold && computedThreshold <= (1.0 + var) * threshold); } } // ===== COMMIT-TIME COMPACTION TESTS ===== @Test public void testTraditionalBehaviorWithoutCommitTimeCompaction() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(-1); // Default: not available SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(30_000L); // Post-commit rows when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use post-commit rows (30,000) for calculation // (30,000 / 200,000) * 300,000 = 45,000 assertEquals(threshold, 45_000); assertEquals(computer.getSegmentRowsToSizeRatio(), 0.15); // 30,000 / 200,000 } @Test public void testTraditionalBehaviorWithZeroPreCommitRows() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); 
when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(-1); // No pre-commit data available SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(25_000L); // Post-commit rows when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use post-commit rows (25,000) for calculation // (25,000 / 200,000) * 300,000 = 37,500 assertEquals(threshold, 37_500); assertEquals(computer.getSegmentRowsToSizeRatio(), 0.125); // 25,000 / 200,000 } @Test public void testCommitTimeCompactionWithHighCompactionRatio() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(100_000); // High pre-commit count SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(20_000L); // Low post-commit (80% compaction) when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(80_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Uses estimated pre-commit size (derived) leading to size too big, so halves pre-commit rows // Estimated pre-commit size = 200,000 * (100,000 / 20,000) = 1,000,000 > 450,000 => 100,000 / 2 = 50,000 assertEquals(threshold, 50_000); // Ratio equals 
post-commit rows / post-commit size assertEquals(computer.getSegmentRowsToSizeRatio(), 0.1); } @Test public void testCommitTimeCompactionWithMediumCompactionRatio() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(400_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(250_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(60_000); // Pre-commit count SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(40_000L); // Post-commit (33% compaction) when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(50_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Estimated pre-commit size = 250,000 * (60,000 / 40,000) = 375,000 (within optimal) // Ratio equals 40,000 / 250,000 = 0.16 -> target rows = 400,000 * 0.16 = 64,000 assertEquals(threshold, 64_000); assertEquals(computer.getSegmentRowsToSizeRatio(), 0.16); } @Test public void testCommitTimeCompactionWithLowCompactionRatio() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(350_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(180_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(45_000); // Pre-commit count SegmentZKMetadata committingSegmentZKMetadata = 
mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(42_000L); // Post-commit (7% compaction) when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(40_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Estimated pre-commit size = 180,000 * (45,000 / 42,000) ≈ 192,857 (within optimal) // Ratio equals 45,000 / 192,857 ≈ 0.23333 -> target rows ≈ 81,666 assertEquals(threshold, 81_666); assertEquals(computer.getSegmentRowsToSizeRatio(), 45000d / 192857d, 1.0e-6); } @Test public void testCommitTimeCompactionWithTimeThresholdHit() { long currentTime = 1640216032391L; Clock clock = Clock.fixed(java.time.Instant.ofEpochMilli(currentTime), ZoneId.of("UTC")); SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(clock); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); when(streamConfig.getFlushThresholdTimeMillis()).thenReturn(MILLISECONDS.convert(6, TimeUnit.HOURS)); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(80_000); // High pre-commit count SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(15_000L); // Low post-commit (81% compaction) when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(100_000); when(committingSegmentZKMetadata.getCreationTime()).thenReturn( currentTime - MILLISECONDS.convert(1, TimeUnit.HOURS)); // Time threshold not hit computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = 
computer.computeThreshold(streamConfig, "newSegmentName"); // Estimated pre-commit size = 200,000 * (80,000 / 15,000) ≈ 1,066,667 (> optimal), so halve pre-commit rows assertEquals(threshold, 40_000); } @Test public void testRatioCalculationProgressionWithCommitTimeCompaction() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); // First segment: traditional (no compaction) CommittingSegmentDescriptor firstDescriptor = mock(CommittingSegmentDescriptor.class); when(firstDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(firstDescriptor.getPreCommitRowCount()).thenReturn(-1); // No compaction SegmentZKMetadata firstMetadata = mock(SegmentZKMetadata.class); when(firstMetadata.getTotalDocs()).thenReturn(40_000L); when(firstMetadata.getSizeThresholdToFlushSegment()).thenReturn(30_000); computer.onSegmentCommit(firstDescriptor, firstMetadata); double firstRatio = computer.getSegmentRowsToSizeRatio(); assertEquals(firstRatio, 0.2); // 40,000 / 200,000 // Second segment: with compaction CommittingSegmentDescriptor secondDescriptor = mock(CommittingSegmentDescriptor.class); when(secondDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(secondDescriptor.getPreCommitRowCount()).thenReturn(60_000); // With compaction SegmentZKMetadata secondMetadata = mock(SegmentZKMetadata.class); when(secondMetadata.getTotalDocs()).thenReturn(30_000L); // Post-commit (50% compaction) when(secondMetadata.getSizeThresholdToFlushSegment()).thenReturn(40_000); computer.onSegmentCommit(secondDescriptor, secondMetadata); double secondRatio = computer.getSegmentRowsToSizeRatio(); // Ratio uses post-commit rows/size for the update when pre-commit rows are present // (0.1 * (30,000 / 200,000)) + (0.9 * 0.2) = (0.1 * 0.15) + (0.9 * 0.2) = 0.195 assertEquals(secondRatio, 0.195, 0.001); } @Test public void 
testDebugMixedScenarios() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); // First segment: With compaction CommittingSegmentDescriptor compactedDescriptor = mock(CommittingSegmentDescriptor.class); when(compactedDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(compactedDescriptor.getPreCommitRowCount()).thenReturn(80_000); SegmentZKMetadata compactedMetadata = mock(SegmentZKMetadata.class); when(compactedMetadata.getTotalDocs()).thenReturn(20_000L); when(compactedMetadata.getSizeThresholdToFlushSegment()).thenReturn(60_000); when(compactedMetadata.getSegmentName()).thenReturn("segment1"); computer.onSegmentCommit(compactedDescriptor, compactedMetadata); double firstRatio = computer.getSegmentRowsToSizeRatio(); assertEquals(firstRatio, 0.1, 0.001); // uses post-commit rows/size ratio // Test threshold after first segment int firstThreshold = computer.computeThreshold(streamConfig, "nextSegment1"); // Estimated pre-commit size is large -> halve pre-commit rows -> 40,000 assertEquals(firstThreshold, 40_000); } @Test public void testMixedScenariosWithAndWithoutCompaction() { long currentTime = System.currentTimeMillis(); Clock clock = Clock.fixed(java.time.Instant.ofEpochMilli(currentTime), ZoneId.of("UTC")); SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(clock); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); when(streamConfig.getFlushThresholdTimeMillis()).thenReturn(MILLISECONDS.convert(6, TimeUnit.HOURS)); // Segment 1: With compaction - Make it hit row threshold, not time threshold CommittingSegmentDescriptor compactedDescriptor = mock(CommittingSegmentDescriptor.class); when(compactedDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); 
when(compactedDescriptor.getPreCommitRowCount()).thenReturn(80_000); SegmentZKMetadata compactedMetadata = mock(SegmentZKMetadata.class); when(compactedMetadata.getTotalDocs()).thenReturn(20_000L); // Less than pre-commit to hit row threshold when(compactedMetadata.getSizeThresholdToFlushSegment()).thenReturn(60_000); when(compactedMetadata.getSegmentName()).thenReturn("segment1"); when(compactedMetadata.getCreationTime()).thenReturn(currentTime - MILLISECONDS.convert(1, TimeUnit.HOURS)); computer.onSegmentCommit(compactedDescriptor, compactedMetadata); assertEquals(computer.getSegmentRowsToSizeRatio(), 0.1); // uses post-commit rows/size ratio // Segment 2: Without compaction - Make it hit row threshold, not time threshold CommittingSegmentDescriptor traditionalDescriptor = mock(CommittingSegmentDescriptor.class); when(traditionalDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(traditionalDescriptor.getPreCommitRowCount()).thenReturn(-1); // No compaction data SegmentZKMetadata traditionalMetadata = mock(SegmentZKMetadata.class); when(traditionalMetadata.getTotalDocs()).thenReturn(30_000L); // Less than rows consumed to hit row threshold when(traditionalMetadata.getSizeThresholdToFlushSegment()).thenReturn(25_000); when(traditionalMetadata.getSegmentName()).thenReturn("segment2"); when(traditionalMetadata.getCreationTime()).thenReturn(currentTime - MILLISECONDS.convert(1, TimeUnit.HOURS)); computer.onSegmentCommit(traditionalDescriptor, traditionalMetadata); // Traditional segment uses post-commit rows/size; new ratio: 0.1*0.15 + 0.9*0.1 = 0.105 assertEquals(computer.getSegmentRowsToSizeRatio(), 0.105, 0.001); // Next threshold calculation should work correctly int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // (300,000 * 0.105) = 31,500 assertEquals(threshold, 31_500); } @Test public void testEdgeCaseZeroPreCommitRowsWithCompaction() { SizeBasedSegmentFlushThresholdComputer computer = new 
SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(0); // Actually zero pre-commit rows SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(25_000L); // Post-commit rows when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(20_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use post-commit rows (25,000) since pre-commit is 0 // (25,000 / 200,000) * 300,000 = 37,500 assertEquals(threshold, 37_500); assertEquals(computer.getSegmentRowsToSizeRatio(), 0.125); // 25,000 / 200,000 } @Test public void testEdgeCaseZeroPostCommitRowsWithPreCommitRows() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushAutotuneInitialRows()).thenReturn(50_000); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(200_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(100_000); // Has pre-commit data SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(0L); // Zero post-commit rows when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(80_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); // 
Should use pre-commit rows (100,000) since it's > 0, even though post-commit is 0 assertEquals(computer.getSegmentRowsToSizeRatio(), 0.5); // 100,000 / 200,000 int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should not fall back to initial rows since we have valid ratio assertTrue(threshold > 50_000); // Should be significantly higher than initial rows } @Test public void testForceCommitSkipsRatioUpdateWithCommitTimeCompaction() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); when(streamConfig.getFlushAutotuneInitialRows()).thenReturn(25_000); CommittingSegmentDescriptor committingSegmentDescriptor = mock(CommittingSegmentDescriptor.class); when(committingSegmentDescriptor.getSegmentSizeBytes()).thenReturn(500_000L); when(committingSegmentDescriptor.getPreCommitRowCount()).thenReturn(100_000); // High pre-commit count when(committingSegmentDescriptor.getStopReason()).thenReturn(REASON_FORCE_COMMIT_MESSAGE_RECEIVED); SegmentZKMetadata committingSegmentZKMetadata = mock(SegmentZKMetadata.class); when(committingSegmentZKMetadata.getTotalDocs()).thenReturn(20_000L); // Low post-commit when(committingSegmentZKMetadata.getSizeThresholdToFlushSegment()).thenReturn(80_000); computer.onSegmentCommit(committingSegmentDescriptor, committingSegmentZKMetadata); // Ratio should remain 0 due to force commit, regardless of pre-commit data assertEquals(computer.getSegmentRowsToSizeRatio(), 0.0); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use the previous segment's threshold (80,000) assertEquals(threshold, 80_000); } @Test public void testSegmentSizeAdjustmentWithCommitTimeCompaction() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); StreamConfig streamConfig = mock(StreamConfig.class); 
when(streamConfig.getFlushThresholdSegmentSizeBytes()).thenReturn(300_000L); // Test case: segment size too big CommittingSegmentDescriptor largeSizeDescriptor = mock(CommittingSegmentDescriptor.class); when(largeSizeDescriptor.getSegmentSizeBytes()).thenReturn(500_000L); // Much larger than target when(largeSizeDescriptor.getPreCommitRowCount()).thenReturn(120_000); // Pre-commit count SegmentZKMetadata largeSizeMetadata = mock(SegmentZKMetadata.class); when(largeSizeMetadata.getTotalDocs()).thenReturn(24_000L); // Post-commit (80% compaction) when(largeSizeMetadata.getSizeThresholdToFlushSegment()).thenReturn(100_000); computer.onSegmentCommit(largeSizeDescriptor, largeSizeMetadata); int threshold = computer.computeThreshold(streamConfig, "newSegmentName"); // Should use pre-commit rows (120,000) and halve it due to large segment size // 120,000 / 2 = 60,000 assertEquals(threshold, 60_000); } @Test public void testCalculateSizeForCalculationWithPreCommitRows() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); // Test with valid pre-commit and post-commit data // Expected: (200_000 * 100_000) / 50_000 = 400_000 long result = computer.calculateSizeForCalculation(true, 100_000, 50_000, 200_000); assertEquals(result, 400_000L); // Test with pre-commit rows but zero post-commit rows (cannot estimate) // Expected: fallback to post-commit size result = computer.calculateSizeForCalculation(true, 100_000, 0, 200_000); assertEquals(result, 200_000L); // Test with high compression (90% reduction) // Expected: (50_000 * 1_000_000) / 100_000 = 500_000 result = computer.calculateSizeForCalculation(true, 1_000_000, 100_000, 50_000); assertEquals(result, 500_000L); } @Test public void testCalculateSizeForCalculationWithoutPreCommitRows() { SizeBasedSegmentFlushThresholdComputer computer = new SizeBasedSegmentFlushThresholdComputer(); // Test without using pre-commit rows long result = computer.calculateSizeForCalculation(false, 
100_000, 50_000, 200_000); // Expected: post-commit size directly assertEquals(result, 200_000L); } }
apache/systemds
33,693
src/main/java/org/apache/sysds/runtime/instructions/fed/QuantilePickFEDInstruction.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.instructions.fed; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.ImmutableTriple; import org.apache.commons.lang3.tuple.Pair; import org.apache.sysds.common.Opcodes; import org.apache.sysds.hops.fedplanner.FTypes.FType; import org.apache.sysds.lops.PickByCount.OperationTypes; import org.apache.sysds.runtime.DMLRuntimeException; import org.apache.sysds.runtime.controlprogram.caching.CacheableData; import org.apache.sysds.runtime.controlprogram.caching.FrameObject; import org.apache.sysds.runtime.controlprogram.caching.MatrixObject; import org.apache.sysds.runtime.controlprogram.context.ExecutionContext; import org.apache.sysds.runtime.controlprogram.federated.FederatedData; import org.apache.sysds.runtime.controlprogram.federated.FederatedRange; import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest; import 
org.apache.sysds.runtime.controlprogram.federated.FederatedResponse; import org.apache.sysds.runtime.controlprogram.federated.FederatedUDF; import org.apache.sysds.runtime.controlprogram.federated.FederationMap; import org.apache.sysds.runtime.controlprogram.federated.FederationUtils; import org.apache.sysds.runtime.frame.data.FrameBlock; import org.apache.sysds.runtime.instructions.InstructionUtils; import org.apache.sysds.runtime.instructions.cp.CPOperand; import org.apache.sysds.runtime.instructions.cp.Data; import org.apache.sysds.runtime.instructions.cp.DoubleObject; import org.apache.sysds.runtime.instructions.cp.QuantilePickCPInstruction; import org.apache.sysds.runtime.instructions.cp.ScalarObject; import org.apache.sysds.runtime.instructions.spark.QuantilePickSPInstruction; import org.apache.sysds.runtime.lineage.LineageItem; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.operators.Operator; @SuppressWarnings("unchecked") public class QuantilePickFEDInstruction extends BinaryFEDInstruction { private final OperationTypes _type; public QuantilePickFEDInstruction(Operator op, CPOperand in, CPOperand out, OperationTypes type, boolean inmem, String opcode, String istr) { this(op, in, null, out, type, inmem, opcode, istr); } public QuantilePickFEDInstruction(Operator op, CPOperand in, CPOperand in2, CPOperand out, OperationTypes type, boolean inmem, String opcode, String istr, FederatedOutput fedOut) { super(FEDType.QPick, op, in, in2, out, opcode, istr, fedOut); _type = type; } public QuantilePickFEDInstruction(Operator op, CPOperand in, CPOperand in2, CPOperand out, OperationTypes type, boolean inmem, String opcode, String istr) { this(op, in, in2, out, type, inmem, opcode, istr, FederatedOutput.NONE); } public static QuantilePickFEDInstruction parseInstruction(QuantilePickCPInstruction instr) { return new QuantilePickFEDInstruction(instr.getOperator(), instr.input1, instr.input2, instr.output, 
instr.getOperationType(), instr.isInMem(), instr.getOpcode(), instr.getInstructionString());
	}

	public static QuantilePickFEDInstruction parseInstruction(QuantilePickSPInstruction instr) {
		// The Spark variant is never in-memory, hence the hard-coded false.
		return new QuantilePickFEDInstruction(instr.getOperator(), instr.input1, instr.input2, instr.output,
			instr.getOperationType(), false, instr.getOpcode(), instr.getInstructionString());
	}

	/**
	 * Parses a serialized qpick instruction string into a federated instruction.
	 * Accepted layouts: 5 parts (unary mr-iqm), 6 parts (unary with explicit type),
	 * 7 parts (binary with explicit type); the last part is always the federated output mode.
	 */
	public static QuantilePickFEDInstruction parseInstruction(String str) {
		String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
		String opcode = parts[0];
		if(!opcode.equalsIgnoreCase(Opcodes.QPICK.toString()))
			throw new DMLRuntimeException("Unknown opcode while parsing a QuantilePickCPInstruction: " + str);

		FederatedOutput fedOut = FederatedOutput.valueOf(parts[parts.length - 1]);
		QuantilePickFEDInstruction parsed = null;
		switch(parts.length) {
			case 5: {
				// instructions of length 5 originate from unary - mr-iqm
				CPOperand in1 = new CPOperand(parts[1]);
				CPOperand in2 = new CPOperand(parts[2]);
				CPOperand out = new CPOperand(parts[3]);
				parsed = new QuantilePickFEDInstruction(null, in1, in2, out, OperationTypes.IQM, false, opcode, str);
				break;
			}
			case 6: {
				CPOperand in1 = new CPOperand(parts[1]);
				CPOperand out = new CPOperand(parts[2]);
				OperationTypes ptype = OperationTypes.valueOf(parts[3]);
				boolean inmem = Boolean.parseBoolean(parts[4]);
				parsed = new QuantilePickFEDInstruction(null, in1, out, ptype, inmem, opcode, str);
				break;
			}
			case 7: {
				CPOperand in1 = new CPOperand(parts[1]);
				CPOperand in2 = new CPOperand(parts[2]);
				CPOperand out = new CPOperand(parts[3]);
				OperationTypes ptype = OperationTypes.valueOf(parts[4]);
				boolean inmem = Boolean.parseBoolean(parts[5]);
				parsed = new QuantilePickFEDInstruction(null, in1, in2, out, ptype, inmem, opcode, str);
				break;
			}
			default:
				// unrecognized arity: fall through and return null, as before
				break;
		}
		if(parsed != null)
			parsed._fedOut = fedOut;
		return parsed;
	}

	@Override
	public void processInstruction(ExecutionContext ec) {
if(ec.getMatrixObject(input1).isFederated(FType.COL) || ec.getMatrixObject(input1).isFederated(FType.FULL)) processColumnQPick(ec); else processRowQPick(ec); } public <T> MatrixBlock getEquiHeightBins(ExecutionContext ec, int colID, double[] quantiles) { FrameObject inFrame = ec.getFrameObject(input1); FederationMap frameFedMap = inFrame.getFedMapping(); // Create vector MatrixObject in = ExecutionContext.createMatrixObject(new MatrixBlock((int) inFrame.getNumRows(), 1, false)); long varID = FederationUtils.getNextFedDataID(); ec.setVariable(String.valueOf(varID), in); // modify map here List<FederatedRange> ranges = new ArrayList<>(); FederationMap oldFedMap = frameFedMap.mapParallel(varID, (range, data) -> { try { int colIDWorker = colID; if(colID >= range.getBeginDims()[1] && colID < range.getEndDims()[1]) { if(range.getBeginDims()[1] > 1) colIDWorker = colID - (int) range.getBeginDims()[1]; FederatedResponse response = data.executeFederatedOperation( new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.CreateMatrixFromFrame(data.getVarID(), varID, colIDWorker))).get(); synchronized(ranges) { ranges.add(range); } if(!response.isSuccessful()) response.throwExceptionFromResponse(); } } catch(Exception e) { throw new DMLRuntimeException(e); } return null; }); //create one column federated object List<Pair<FederatedRange, FederatedData>> newFedMapPairs = new ArrayList<>(); for(Pair<FederatedRange, FederatedData> mapPair : oldFedMap.getMap()) { for(FederatedRange r : ranges) { if(mapPair.getLeft().equals(r)) { newFedMapPairs.add(mapPair); } } } FederationMap newFedMap = new FederationMap(varID, newFedMapPairs, FType.COL); // construct a federated matrix with the encoded data in.getDataCharacteristics().setDimension(in.getNumRows(),1); in.setFedMapping(newFedMap); // Find min and max List<double[]> minMax = new ArrayList<>(); newFedMap.mapParallel(varID, (range, data) -> { try { FederatedResponse response = 
data.executeFederatedOperation(new FederatedRequest( FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.MinMax(data.getVarID()))).get(); if(!response.isSuccessful()) response.throwExceptionFromResponse(); double[] rangeMinMax = (double[]) response.getData()[0]; minMax.add(rangeMinMax); return null; } catch(Exception e) { throw new DMLRuntimeException(e); } }); // Find weights sum, min and max double globalMin = Double.MAX_VALUE, globalMax = Double.MIN_VALUE, vectorLength = inFrame.getNumColumns() == 2 ? 0 : inFrame.getNumRows(); for(double[] values : minMax) { globalMin = Math.min(globalMin, values[0]); globalMax = Math.max(globalMax, values[1]); } // If multiple quantiles take first histogram and reuse bins, otherwise recursively get bin with result int numBuckets = 256; // (int) Math.round(in.getNumRows() / 2.0); T ret = createHistogram(in, (int) vectorLength, globalMin, globalMax, numBuckets, -1, false); // Compute and set results MatrixBlock quantileValues = computeMultipleQuantiles(ec, in, (int[]) ret, quantiles, (int) vectorLength, varID, (globalMax-globalMin) / numBuckets, globalMin, _type, true); ec.removeVariable(String.valueOf(varID)); // Add min to the result MatrixBlock res = new MatrixBlock(quantileValues.getNumRows() + 1, 1, false); res.set(0,0, globalMin); res.copy(1, quantileValues.getNumRows(), 0, 0, quantileValues,false); return res; } public <T> void processRowQPick(ExecutionContext ec) { MatrixObject in = ec.getMatrixObject(input1); FederationMap fedMap = in.getFedMapping(); boolean average = _type == OperationTypes.MEDIAN; double[] quantiles = input2 != null ? (input2.isMatrix() ? ec.getMatrixInput(input2).getDenseBlockValues() : input2.isScalar() ? new double[] {ec.getScalarInput(input2).getDoubleValue()} : null) : (average ? new double[] {0.5} : _type == OperationTypes.IQM ? 
new double[] {0.25, 0.75} : null); if (input2 != null && input2.isMatrix()) ec.releaseMatrixInput(input2.getName()); // Find min and max long varID = FederationUtils.getNextFedDataID(); List<double[]> minMax = new ArrayList<>(); fedMap.mapParallel(varID, (range, data) -> { try { FederatedResponse response = data.executeFederatedOperation(new FederatedRequest( FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.MinMax(data.getVarID()))).get(); if(!response.isSuccessful()) response.throwExceptionFromResponse(); double[] rangeMinMax = (double[]) response.getData()[0]; minMax.add(rangeMinMax); return null; } catch(Exception e) { throw new DMLRuntimeException(e); } }); // Find weights sum, min and max double globalMin = Double.MAX_VALUE, globalMax = Double.MIN_VALUE, vectorLength = in.getNumColumns() == 2 ? 0 : in.getNumRows(), sumWeights = 0.0; for(double[] values : minMax) { globalMin = Math.min(globalMin, values[0]); globalMax = Math.max(globalMax, values[1]); if(in.getNumColumns() == 2) vectorLength += values[2]; sumWeights += values[3]; } // Average for median average = average && (in.getNumColumns() == 2 ? sumWeights : in.getNumRows()) % 2 == 0; // If multiple quantiles take first histogram and reuse bins, otherwise recursively get bin with result int numBuckets = 256; // (int) Math.round(in.getNumRows() / 2.0); int quantileIndex = quantiles != null && quantiles.length == 1 ? 
(int) Math.round(vectorLength * quantiles[0]) : -1; T ret = createHistogram(in, (int) vectorLength, globalMin, globalMax, numBuckets, quantileIndex, average); // Compute and set results if(quantiles != null && quantiles.length > 1) { double finalVectorLength = vectorLength; quantiles = Arrays.stream(quantiles).map(val -> (int) Math.round(finalVectorLength * val)).toArray(); computeMultipleQuantiles(ec, in, (int[]) ret, quantiles, (int) vectorLength, varID, (globalMax-globalMin) / numBuckets, globalMin, _type, false); } else getSingleQuantileResult(ret, ec, fedMap, varID, average, false, (int) vectorLength, null); } private <T> MatrixBlock computeMultipleQuantiles(ExecutionContext ec, MatrixObject in, int[] bucketsFrequencies, double[] quantiles, int vectorLength, long varID, double bucketRange, double min, OperationTypes type, boolean returnOutput) { MatrixBlock out = new MatrixBlock(quantiles.length, 1, false); ImmutableTriple<Integer, Integer, ImmutablePair<Double, Double>>[] bucketsWithIndex = new ImmutableTriple[quantiles.length]; // Find bins with each quantile for first histogram int sizeBeforeTmp = 0, sizeBefore = 0, countFoundBins = 0; for(int j = 0; j < bucketsFrequencies.length; j++) { sizeBeforeTmp += bucketsFrequencies[j]; for(int i = 0; i < quantiles.length; i++) { ImmutablePair<Double, Double> bucketWithQ; if(quantiles[i] > sizeBefore && quantiles[i] <= sizeBeforeTmp) { bucketWithQ = new ImmutablePair<>(min + (j * bucketRange), min + ((j+1) * bucketRange)); bucketsWithIndex[i] = new ImmutableTriple<>(quantiles[i] == 1 ? 1 : (int) quantiles[i] - sizeBefore, bucketsFrequencies[j], bucketWithQ); countFoundBins++; } } sizeBefore = sizeBeforeTmp; if(countFoundBins == quantiles.length) break; } // Find each quantile bin recursively Map<Integer, T> retBuckets = new HashMap<>(); double q25Left = 0, q25Right = 0, q75Left = 0, q75Right = 0; for(int i = 0; i < bucketsWithIndex.length; i++) { int nextNumBuckets = bucketsWithIndex[i].middle < 100 ? 
bucketsWithIndex[i].middle * 2 : (int) Math.round(bucketsWithIndex[i].middle / 2.0); T hist = createHistogram(in, vectorLength, bucketsWithIndex[i].right.left, bucketsWithIndex[i].right.right, nextNumBuckets, bucketsWithIndex[i].left, false); if(_type == OperationTypes.IQM) { q25Right = i == 0 ? hist instanceof ImmutablePair ? ((ImmutablePair<Double, Double>)hist).right : (Double) hist : q25Right; q25Left = i == 0 ? hist instanceof ImmutablePair ? ((ImmutablePair<Double, Double>)hist).left : (Double) hist : q25Left; q75Right = i == 1 ? hist instanceof ImmutablePair ? ((ImmutablePair<Double, Double>)hist).right : (Double) hist : q75Right; q75Left = i == 1 ? hist instanceof ImmutablePair ? ((ImmutablePair<Double, Double>)hist).left : (Double) hist : q75Left; } else { if(hist instanceof ImmutablePair) retBuckets.put(i, hist); // set value if returned double instead of bin else out.set(i, 0, (Double) hist); } } if(type == OperationTypes.IQM) { ImmutablePair<Double, Double> IQMRange = new ImmutablePair<>(q25Right, q75Right); if(q25Right == q75Right) ec.setScalarOutput(output.getName(), new DoubleObject(q25Left)); else getSingleQuantileResult(IQMRange, ec, in.getFedMapping(), varID, false, true, vectorLength, new ImmutablePair<>(q25Left, q75Left)); } else { if(!retBuckets.isEmpty()) { // Search for values within bucket range where it as returned in.getFedMapping().mapParallel(varID, (range, data) -> { try { FederatedResponse response = data.executeFederatedOperation(new FederatedRequest( FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.GetValuesInRanges(data.getVarID(), quantiles.length, (HashMap<Integer, ImmutablePair<Double, Double>>) retBuckets))).get(); if(!response.isSuccessful()) response.throwExceptionFromResponse(); // Add results by row MatrixBlock tmp = (MatrixBlock) response.getData()[0]; synchronized(out) { out.binaryOperationsInPlace(InstructionUtils.parseBinaryOperator(Opcodes.PLUS.toString()), tmp); } return null; } catch(Exception 
e) { throw new DMLRuntimeException(e); } }); } if(returnOutput) return out; else ec.setMatrixOutput(output.getName(), out); } return null; } private <T> void getSingleQuantileResult(T ret, ExecutionContext ec, FederationMap fedMap, long varID, boolean average, boolean isIQM, int vectorLength, ImmutablePair<Double, Double> iqmRange) { double result = 0.0, q25Part = 0, q25Val = 0, q75Val = 0, q75Part = 0; if(ret instanceof ImmutablePair) { // Search for values within bucket range List<Double> values = new ArrayList<>(); List<double[]> iqmValues = new ArrayList<>(); fedMap.mapParallel(varID, (range, data) -> { try { FederatedResponse response = data.executeFederatedOperation(new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.GetValuesInRange(data.getVarID(), (ImmutablePair<Double, Double>) ret, isIQM, iqmRange))).get(); if(!response.isSuccessful()) response.throwExceptionFromResponse(); if(isIQM) iqmValues.add((double[]) response.getData()[0]); else values.add((double) response.getData()[0]); return null; } catch(Exception e) { throw new DMLRuntimeException(e); } }); if(isIQM) { for(double[] vals : iqmValues) { result += vals[0]; q25Part += vals[1]; q25Val += vals[2]; q75Part += vals[3]; q75Val += vals[4]; } q25Part -= (0.25 * vectorLength); q75Part -= (0.75 * vectorLength); } else result = values.stream().reduce(0.0, Double::sum); } else result = (Double) ret; result = average ? result / 2 : (isIQM ? 
((result + q25Part*q25Val - q75Part*q75Val) / (vectorLength * 0.5)) : result); ec.setScalarOutput(output.getName(), new DoubleObject(result)); } public <T> T createHistogram(CacheableData<?> in, int vectorLength, double globalMin, double globalMax, int numBuckets, int quantileIndex, boolean average) { FederationMap fedMap = in.getFedMapping(); List<int[]> hists = new ArrayList<>(); List<Set<Double>> distincts = new ArrayList<>(); double bucketRange = (globalMax-globalMin) / numBuckets; boolean isEvenNumRows = vectorLength % 2 == 0; // Create histograms long varID = FederationUtils.getNextFedDataID(); fedMap.mapParallel(varID, (range, data) -> { try { FederatedResponse response = data.executeFederatedOperation(new FederatedRequest( FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.GetHistogram(data.getVarID(), globalMin, globalMax, bucketRange, numBuckets))).get(); if(!response.isSuccessful()) response.throwExceptionFromResponse(); int[] rangeHist = (int[]) response.getData()[0]; hists.add(rangeHist); Set<Double> rangeDistinct = (Set<Double>) response.getData()[1]; distincts.add(rangeDistinct); return null; } catch(Exception e) { throw new DMLRuntimeException(e); } }); // Merge results into one histogram int[] bucketsFrequencies = new int[numBuckets]; for(int[] hist : hists) for(int i = 0; i < hist.length; i++) bucketsFrequencies[i] += hist[i]; if(quantileIndex == -1) return (T) bucketsFrequencies; // Find bucket with quantile ImmutableTriple<Integer, Integer, ImmutablePair<Double, Double>> bucketWithIndex = getBucketWithIndex(bucketsFrequencies, globalMin, quantileIndex, average, isEvenNumRows, bucketRange); // Check if can terminate Set<Double> distinctValues = distincts.stream().flatMap(Set::stream).collect(Collectors.toSet()); if(distinctValues.size() > quantileIndex-1 && !average) return (T) distinctValues.stream().sorted().toArray()[quantileIndex > 0 ? 
quantileIndex-1 : 0]; if(average && distinctValues.size() > quantileIndex) { Double[] distinctsSorted = distinctValues.stream().flatMap(Stream::of).sorted().toArray(Double[]::new); Double medianSum = Double.sum(distinctsSorted[quantileIndex-1], distinctsSorted[quantileIndex]); return (T) medianSum; } if((average && distinctValues.size() == 2) || (!average && distinctValues.size() == 1)) return (T) distinctValues.stream().reduce(0.0, Double::sum); ImmutablePair<Double, Double> finalBucketWithQ = bucketWithIndex.right; List<Double> distinctInNewBucket = distinctValues.stream().filter( e -> e >= finalBucketWithQ.left && e <= finalBucketWithQ.right).collect(Collectors.toList()); if((distinctInNewBucket.size() == 1 && !average) || (average && distinctInNewBucket.size() == 2)) return (T) distinctInNewBucket.stream().reduce(0.0, Double::sum); if(!average) { Set<Double> distinctsSet = new HashSet<>(distinctInNewBucket); if(distinctsSet.size() == 1) return (T) distinctsSet.toArray()[0]; } if(distinctValues.size() == 1 || (bucketWithIndex.middle == 1 && !average) || (bucketWithIndex.middle == 2 && isEvenNumRows && average) || globalMin == globalMax) return (T) bucketWithIndex.right; int nextNumBuckets = bucketWithIndex.middle < 100 ? 
bucketWithIndex.middle * 2 : (int) Math.round(bucketWithIndex.middle / 2.0);

		// Add more bins to not stuck
		// (if the recursion would re-scan the identical range with the identical bucket
		// count, double the resolution to guarantee progress)
		if(numBuckets == nextNumBuckets && globalMin == bucketWithIndex.right.left && globalMax == bucketWithIndex.right.right) {
			nextNumBuckets *= 2;
		}

		// Recurse into the value range of the bucket that contains the quantile index.
		return createHistogram(in, vectorLength, bucketWithIndex.right.left, bucketWithIndex.right.right, nextNumBuckets, bucketWithIndex.left, average);
	}

	// Locates the histogram bucket holding the quantile at the given (1-based) global index.
	// Returns a triple of (quantile index re-based into the bucket, bucket frequency,
	// [left, right] value range of the bucket).
	private ImmutableTriple<Integer, Integer, ImmutablePair<Double, Double>> getBucketWithIndex(int[] bucketFrequencies, double min, int quantileIndex, boolean average, boolean isEvenNumRows, double bucketRange) {
		int sizeBeforeTmp = 0, sizeBefore = 0, bucketWithQSize = 0;
		ImmutablePair<Double, Double> bucketWithQ = null;
		double tmpBinLeft = min;
		for(int i = 0; i < bucketFrequencies.length; i++) {
			// sizeBeforeTmp accumulates the cumulative frequency up to and including bucket i
			sizeBeforeTmp += bucketFrequencies[i];
			if(quantileIndex <= sizeBeforeTmp && bucketWithQSize == 0) {
				// First bucket whose cumulative frequency reaches the quantile index.
				bucketWithQ = new ImmutablePair<>(tmpBinLeft, tmpBinLeft + bucketRange);
				bucketWithQSize = bucketFrequencies[i];
				sizeBeforeTmp -= bucketWithQSize;
				sizeBefore = sizeBeforeTmp;
				// For an averaged median the second index may fall into a later bucket,
				// so only stop here if it is covered by this bucket as well.
				if(!average || sizeBefore + bucketWithQSize >= quantileIndex + 1)
					break;
			}
			else if(quantileIndex + 1 <= sizeBeforeTmp + bucketWithQSize && isEvenNumRows && average) {
				// Add right bin that contains second index
				int bucket2Size = bucketFrequencies[i];
				if (bucket2Size != 0) {
					// Widen the range to span both buckets contributing to the average.
					bucketWithQ = new ImmutablePair<>(bucketWithQ.left, tmpBinLeft + bucketRange);
					bucketWithQSize += bucket2Size;
					break;
				}
			}
			tmpBinLeft += bucketRange;
		}
		// Re-base the quantile index relative to the found bucket.
		quantileIndex = quantileIndex == 1 ?
1 : quantileIndex - sizeBefore; return new ImmutableTriple<>(quantileIndex, bucketWithQSize, bucketWithQ); } public static class CreateMatrixFromFrame extends FederatedUDF { private static final long serialVersionUID = -6569370318237863595L; private final long _outputID; private final int _id; public CreateMatrixFromFrame(long input, long output, int id) { super(new long[] {input}); _outputID = output; _id = id; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { FrameBlock fb = ((FrameObject) data[0]).acquireReadAndRelease(); double[] colData = ArrayUtils.toPrimitive(Arrays.stream((Object[]) fb.getColumnData(_id)).map(e -> Double.valueOf(String.valueOf(e))).toArray(Double[] :: new)); MatrixBlock mbout = new MatrixBlock(fb.getNumRows(), 1, colData); // create output matrix object MatrixObject mo = ExecutionContext.createMatrixObject(mbout); // add it to the list of variables ec.setVariable(String.valueOf(_outputID), mo); // return id handle return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS_EMPTY); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public static class GetHistogram extends FederatedUDF { private static final long serialVersionUID = 5413355823424777742L; private final double _max; private final double _min; private final double _range; private final int _numBuckets; private GetHistogram(long input, double min, double max, double range, int numBuckets) { super(new long[] {input}); _max = max; _min = min; _range = range; _numBuckets = numBuckets; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { MatrixBlock mb = ((MatrixObject) data[0]).acquireReadAndRelease(); double[] values = mb.getDenseBlockValues(); boolean isWeighted = mb.getNumColumns() == 2; Set<Double> distinct = new HashSet<>(); int[] frequencies = new int[_numBuckets]; // binning for(int i = 0; i < values.length - (isWeighted ? 1 : 0); i += (isWeighted ? 
2 : 1)) { double val = values[i]; int weight = isWeighted ? (int) values[i+1] : 1; int index = (int) (Math.ceil((val - _min) / _range)); index = index == 0 ? 0 : index - 1; if (val >= _min && val <= _max) { frequencies[index] += weight; distinct.add(val); } } Object[] ret = new Object[] {frequencies, distinct.size() < 3 ? distinct : new HashSet<>()}; return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, ret); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public static class GetValuesInRanges extends FederatedUDF { private static final long serialVersionUID = 8663298932616139153L; private final int _numQuantiles; private final HashMap<Integer, ImmutablePair<Double, Double>> _ranges; private GetValuesInRanges(long input,int numQuantiles, HashMap<Integer, ImmutablePair<Double, Double>> ranges) { super(new long[] {input}); _ranges = ranges; _numQuantiles = numQuantiles; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { MatrixBlock mb = ((MatrixObject) data[0]).acquireReadAndRelease(); double[] values = mb.getDenseBlockValues(); MatrixBlock res = new MatrixBlock(_numQuantiles, 1, false); for(double val : values) { for(Map.Entry<Integer, ImmutablePair<Double, Double>> entry : _ranges.entrySet()) { // Find value within computed bin if(entry.getValue().left <= val && val <= entry.getValue().right) { res.set(entry.getKey(), 0,val); break; } } } return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, res); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public static class MinMax extends FederatedUDF { private static final long serialVersionUID = -3906698363866500744L; private MinMax(long input) { super(new long[] {input}); } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) { MatrixBlock mb = ((MatrixObject) data[0]).acquireReadAndRelease(); double[] ret = new double[]{mb.getNumColumns() == 2 ? mb.colMin().get(0, 0) : mb.min(), mb.getNumColumns() == 2 ? mb.colMax().get(0, 0) : mb.max(), mb.getNumColumns() == 2 ? mb.colSum().get(0, 1) : 0, mb.getNumColumns() == 2 ? mb.sumWeightForQuantile() : 0}; return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, ret); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public void processColumnQPick(ExecutionContext ec) { MatrixObject in = ec.getMatrixObject(input1); FederationMap fedMapping = in.getFedMapping(); List <Object> res = new ArrayList<>(); long varID = FederationUtils.getNextFedDataID(); fedMapping.mapParallel(varID, (range, data) -> { FederatedResponse response; try { switch( _type ) { case VALUEPICK: if(input2.isScalar()) { ScalarObject quantile = ec.getScalarInput(input2); response = data.executeFederatedOperation( new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF,-1, new QuantilePickFEDInstruction.ValuePick(data.getVarID(), quantile))).get(); } else { MatrixBlock quantiles = ec.getMatrixInput(input2.getName()); response = data.executeFederatedOperation( new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF,-1, new QuantilePickFEDInstruction.ValuePick(data.getVarID(), quantiles))).get(); } break; case IQM: response = data .executeFederatedOperation( new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.ColIQM(data.getVarID()))).get(); break; case MEDIAN: response = data .executeFederatedOperation(new FederatedRequest(FederatedRequest.RequestType.EXEC_UDF, -1, new QuantilePickFEDInstruction.ColMedian(data.getVarID()))).get(); break; default: throw new DMLRuntimeException("Unsupported qpick operation type: "+_type); } if(!response.isSuccessful()) response.throwExceptionFromResponse(); res.add(response.getData()[0]); } catch(Exception e) { throw new 
DMLRuntimeException(e); } return null; }); assert res.size() == 1; if (input2 != null && input2.isMatrix()) ec.releaseMatrixInput(input2.getName()); if(output.isScalar()) ec.setScalarOutput(output.getName(), new DoubleObject((double) res.get(0))); else ec.setMatrixOutput(output.getName(), (MatrixBlock) res.get(0)); } private static class ValuePick extends FederatedUDF { private static final long serialVersionUID = -2594912886841345102L; private final MatrixBlock _quantiles; protected ValuePick(long input, ScalarObject quantile) { super(new long[] {input}); _quantiles = new MatrixBlock(quantile.getDoubleValue()); } protected ValuePick(long input, MatrixBlock quantiles) { super(new long[] {input}); _quantiles = quantiles; } @Override public FederatedResponse execute(ExecutionContext ec, Data... data) { MatrixBlock mb = ((MatrixObject)data[0]).acquireReadAndRelease(); MatrixBlock picked; if (_quantiles.getLength() == 1) { return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, new Object[] {mb.pickValue(_quantiles.get(0, 0))}); } else { picked = mb.pickValues(_quantiles, new MatrixBlock()); return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, new Object[] {picked}); } } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } public static class GetValuesInRange extends FederatedUDF { private static final long serialVersionUID = 5413355823424777742L; private final ImmutablePair<Double, Double> _range; private final ImmutablePair<Double, Double> _iqmRange; private final boolean _sumInRange; private GetValuesInRange(long input, ImmutablePair<Double, Double> range, boolean sumInRange, ImmutablePair<Double, Double> iqmRange) { super(new long[] {input}); _range = range; _sumInRange = sumInRange; _iqmRange = iqmRange; } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) { MatrixBlock mb = ((MatrixObject) data[0]).acquireReadAndRelease(); double[] values = mb.getDenseBlockValues(); boolean isWeighted = mb.getNumColumns() == 2; double res = 0.0; int counter = 0; double q25Part = 0, q25Val = 0, q75Val = 0, q75Part = 0; for(int i = 0; i < values.length - (isWeighted ? 1 : 0); i += (isWeighted ? 2 : 1)) { // get value within computed bin // different conditions for IQM and simple QPICK double val = values[i]; int weight = isWeighted ? (int) values[i+1] : 1; if(_iqmRange != null && val <= _iqmRange.left) { q25Part += weight; } if(_iqmRange != null && val >= _iqmRange.left && val <= _range.left) { q25Val = val; } else if(_iqmRange != null && val <= _iqmRange.right && val >= _range.right) q75Val = val; if((!_sumInRange && _range.left <= val && val <= _range.right) || (_sumInRange && _range.left < val && val <= _range.right)) { res += (val * (!_sumInRange && weight > 1 ? 2 : weight)); counter += weight; } if(_iqmRange != null && val <= _range.right) q75Part += weight; if(!_sumInRange && counter > 2) break; } return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS,!_sumInRange ? res : new double[]{res, q25Part, q25Val, q75Part, q75Val}); } @Override public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) { return null; } } private static class ColIQM extends FederatedUDF { private static final long serialVersionUID = 2223186699111957677L; protected ColIQM(long input) { super(new long[] {input}); } @Override public FederatedResponse execute(ExecutionContext ec, Data... 
data) {
			// Compute the inter-quartile mean locally on this worker's partition.
			MatrixBlock block = ((MatrixObject) data[0]).acquireReadAndRelease();
			Object[] payload = new Object[] {block.interQuartileMean()};
			return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, payload);
		}

		@Override
		public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) {
			return null;
		}
	}

	/** UDF computing the median of the local matrix partition on a federated worker. */
	private static class ColMedian extends FederatedUDF {
		private static final long serialVersionUID = -2808597461054603816L;

		protected ColMedian(long input) {
			super(new long[] {input});
		}

		@Override
		public FederatedResponse execute(ExecutionContext ec, Data... data) {
			MatrixBlock block = ((MatrixObject) data[0]).acquireReadAndRelease();
			Object[] payload = new Object[] {block.median()};
			return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, payload);
		}

		@Override
		public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) {
			return null;
		}
	}
}
apache/incubator-kie-optaplanner
35,820
core/optaplanner-core-impl/src/test/java/org/optaplanner/core/api/solver/SolverManagerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.optaplanner.core.api.solver; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatIllegalStateException; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.fail; import static org.optaplanner.core.api.solver.SolverStatus.NOT_SOLVING; import static org.optaplanner.core.api.solver.SolverStatus.SOLVING_ACTIVE; import static org.optaplanner.core.api.solver.SolverStatus.SOLVING_SCHEDULED; import static org.optaplanner.core.impl.testdata.util.PlannerAssert.assertSolutionInitialized; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import 
java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.optaplanner.core.api.score.director.ScoreDirector; import org.optaplanner.core.config.constructionheuristic.ConstructionHeuristicPhaseConfig; import org.optaplanner.core.config.localsearch.LocalSearchPhaseConfig; import org.optaplanner.core.config.phase.PhaseConfig; import org.optaplanner.core.config.phase.custom.CustomPhaseConfig; import org.optaplanner.core.config.solver.SolverConfig; import org.optaplanner.core.config.solver.SolverManagerConfig; import org.optaplanner.core.config.solver.termination.TerminationConfig; import org.optaplanner.core.impl.testdata.domain.TestdataEntity; import org.optaplanner.core.impl.testdata.domain.TestdataSolution; import org.optaplanner.core.impl.testdata.domain.TestdataValue; import org.optaplanner.core.impl.testdata.domain.extended.TestdataUnannotatedExtendedSolution; import org.optaplanner.core.impl.testdata.util.PlannerTestUtils; class SolverManagerTest { private SolverManager<TestdataSolution, Long> solverManager; @AfterEach void closeSolverManager() { if (solverManager != null) { solverManager.close(); } } @Test void create() { SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class); SolverManager.create(solverConfig).close(); SolverManagerConfig solverManagerConfig = new SolverManagerConfig(); SolverManager.create(solverConfig, solverManagerConfig).close(); SolverFactory<TestdataSolution> solverFactory = SolverFactory.create(solverConfig); SolverManager.create(solverFactory).close(); SolverManager.create(solverFactory, solverManagerConfig).close(); } @Test @Timeout(60) void solveBatch_2InParallel() throws ExecutionException, InterruptedException { SolverConfig solverConfig = 
PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(createPhaseWithConcurrentSolvingStart(2), new ConstructionHeuristicPhaseConfig()); solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("2")); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1")); SolverJob<TestdataSolution, Long> solverJob2 = solverManager.solve(2L, PlannerTestUtils.generateTestdataSolution("s2")); assertSolutionInitialized(solverJob1.getFinalBestSolution()); assertSolutionInitialized(solverJob2.getFinalBestSolution()); } private CustomPhaseConfig createPhaseWithConcurrentSolvingStart(int barrierPartiesCount) { CyclicBarrier barrier = new CyclicBarrier(barrierPartiesCount); return new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> { try { barrier.await(); } catch (InterruptedException | BrokenBarrierException e) { fail("Cyclic barrier failed."); } }); } @Test @Timeout(60) void getSolverStatus() throws InterruptedException, BrokenBarrierException, ExecutionException { CyclicBarrier solverThreadReadyBarrier = new CyclicBarrier(2); CyclicBarrier mainThreadReadyBarrier = new CyclicBarrier(2); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> { try { solverThreadReadyBarrier.await(); } catch (InterruptedException | BrokenBarrierException e) { fail("Cyclic barrier failed."); } try { mainThreadReadyBarrier.await(); } catch (InterruptedException | BrokenBarrierException e) { fail("Cyclic barrier failed."); } }), new ConstructionHeuristicPhaseConfig()); // Only 1 solver can run at the same time to predict the solver status of each job. 
solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("1")); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1")); solverThreadReadyBarrier.await(); SolverJob<TestdataSolution, Long> solverJob2 = solverManager.solve(2L, PlannerTestUtils.generateTestdataSolution("s2")); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(SOLVING_ACTIVE); assertThat(solverJob1.getSolverStatus()).isEqualTo(SOLVING_ACTIVE); assertThat(solverManager.getSolverStatus(2L)).isEqualTo(SOLVING_SCHEDULED); assertThat(solverJob2.getSolverStatus()).isEqualTo(SOLVING_SCHEDULED); mainThreadReadyBarrier.await(); solverThreadReadyBarrier.await(); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(NOT_SOLVING); assertThat(solverJob1.getSolverStatus()).isEqualTo(NOT_SOLVING); assertThat(solverManager.getSolverStatus(2L)).isEqualTo(SOLVING_ACTIVE); assertThat(solverJob2.getSolverStatus()).isEqualTo(SOLVING_ACTIVE); mainThreadReadyBarrier.await(); solverJob1.getFinalBestSolution(); solverJob2.getFinalBestSolution(); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(NOT_SOLVING); assertThat(solverJob1.getSolverStatus()).isEqualTo(NOT_SOLVING); assertThat(solverManager.getSolverStatus(2L)).isEqualTo(NOT_SOLVING); assertThat(solverJob2.getSolverStatus()).isEqualTo(NOT_SOLVING); } @Test @Timeout(60) void exceptionInSolver() { SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> { throw new IllegalStateException("exceptionInSolver"); })); solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("1")); AtomicInteger exceptionCount = new AtomicInteger(); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solve(1L, problemId -> PlannerTestUtils.generateTestdataSolution("s1"), null, (problemId, 
throwable) -> exceptionCount.incrementAndGet()); assertThatThrownBy(solverJob1::getFinalBestSolution) .isInstanceOf(ExecutionException.class) .hasRootCauseMessage("exceptionInSolver"); assertThat(exceptionCount.get()).isEqualTo(1); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(NOT_SOLVING); assertThat(solverJob1.getSolverStatus()).isEqualTo(NOT_SOLVING); } @Test @Timeout(60) void exceptionInConsumer() throws InterruptedException { SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(new ConstructionHeuristicPhaseConfig()); solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("1")); CountDownLatch consumerInvoked = new CountDownLatch(1); AtomicReference<Throwable> errorInConsumer = new AtomicReference<>(); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solve(1L, problemId -> PlannerTestUtils.generateTestdataSolution("s1"), bestSolution -> { throw new IllegalStateException("exceptionInConsumer"); }, (problemId, throwable) -> { errorInConsumer.set(throwable); consumerInvoked.countDown(); }); consumerInvoked.await(); assertThat(errorInConsumer.get()) .isInstanceOf(IllegalStateException.class) .hasMessage("exceptionInConsumer"); // Accessing the job's final best solution is necessary to guarantee that the solver is no longer solving. assertThatCode(solverJob1::getFinalBestSolution).doesNotThrowAnyException(); // Otherwise, the following assertion could fail. 
assertThat(solverManager.getSolverStatus(1L)).isEqualTo(NOT_SOLVING); assertThat(solverJob1.getSolverStatus()).isEqualTo(NOT_SOLVING); } @Test @Timeout(60) void solveGenerics() throws ExecutionException, InterruptedException { SolverConfig solverConfig = PlannerTestUtils .buildSolverConfig(TestdataSolution.class, TestdataEntity.class); solverManager = SolverManager .create(solverConfig, new SolverManagerConfig()); BiConsumer<Object, Object> exceptionHandler = (o1, o2) -> fail("Solving failed."); Consumer<Object> finalBestSolutionConsumer = o -> { }; Function<Object, TestdataUnannotatedExtendedSolution> problemFinder = o -> new TestdataUnannotatedExtendedSolution( PlannerTestUtils.generateTestdataSolution("s1")); SolverJob<TestdataSolution, Long> solverJob = solverManager.solve(1L, problemFinder, finalBestSolutionConsumer, exceptionHandler); solverJob.getFinalBestSolution(); } @Test @Timeout(60) void skipAhead() throws ExecutionException, InterruptedException { CountDownLatch latch = new CountDownLatch(1); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(new CustomPhaseConfig().withCustomPhaseCommands( (ScoreDirector<TestdataSolution> scoreDirector) -> { TestdataSolution solution = scoreDirector.getWorkingSolution(); TestdataEntity entity = solution.getEntityList().get(0); scoreDirector.beforeVariableChanged(entity, "value"); entity.setValue(solution.getValueList().get(0)); scoreDirector.afterVariableChanged(entity, "value"); scoreDirector.triggerVariableListeners(); }, (ScoreDirector<TestdataSolution> scoreDirector) -> { TestdataSolution solution = scoreDirector.getWorkingSolution(); TestdataEntity entity = solution.getEntityList().get(1); scoreDirector.beforeVariableChanged(entity, "value"); entity.setValue(solution.getValueList().get(1)); scoreDirector.afterVariableChanged(entity, "value"); scoreDirector.triggerVariableListeners(); }, (ScoreDirector<TestdataSolution> scoreDirector) -> { 
TestdataSolution solution = scoreDirector.getWorkingSolution(); TestdataEntity entity = solution.getEntityList().get(2); scoreDirector.beforeVariableChanged(entity, "value"); entity.setValue(solution.getValueList().get(2)); scoreDirector.afterVariableChanged(entity, "value"); scoreDirector.triggerVariableListeners(); }, (ScoreDirector<TestdataSolution> scoreDirector) -> { // In the next best solution event, both e1 and e2 are definitely not null (but e3 might be). latch.countDown(); TestdataSolution solution = scoreDirector.getWorkingSolution(); TestdataEntity entity = solution.getEntityList().get(3); scoreDirector.beforeVariableChanged(entity, "value"); entity.setValue(solution.getValueList().get(3)); scoreDirector.afterVariableChanged(entity, "value"); scoreDirector.triggerVariableListeners(); })); solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("1")); AtomicInteger bestSolutionCount = new AtomicInteger(); AtomicInteger finalBestSolutionCount = new AtomicInteger(); AtomicReference<Throwable> consumptionError = new AtomicReference<>(); CountDownLatch finalBestSolutionConsumed = new CountDownLatch(1); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solveAndListen(1L, problemId -> PlannerTestUtils.generateTestdataSolution("s1", 4), bestSolution -> { boolean isFirstReceivedSolution = bestSolutionCount.incrementAndGet() == 1; if (bestSolution.getEntityList().get(1).getValue() == null) { // This best solution may be skipped as well. 
try { latch.await(); } catch (InterruptedException e) { fail("Latch failed."); } } else if (bestSolution.getEntityList().get(2).getValue() == null && !isFirstReceivedSolution) { fail("No skip ahead occurred: both e2 and e3 are null in a best solution event."); } }, finalBestSolution -> { finalBestSolutionCount.incrementAndGet(); finalBestSolutionConsumed.countDown(); }, (problemId, throwable) -> consumptionError.set(throwable)); assertSolutionInitialized(solverJob1.getFinalBestSolution()); // EventCount can be 2 or 3, depending on the race, but it can never be 4. assertThat(bestSolutionCount).hasValueLessThan(4); finalBestSolutionConsumed.await(); assertThat(finalBestSolutionCount.get()).isEqualTo(1); if (consumptionError.get() != null) { fail("Error in the best solution consumer.", consumptionError.get()); } } @Test @Timeout(600) void terminateEarly() throws InterruptedException, BrokenBarrierException { CyclicBarrier startedBarrier = new CyclicBarrier(2); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withTerminationConfig(new TerminationConfig()) .withPhases(new CustomPhaseConfig().withCustomPhaseCommands((scoreDirector) -> { try { startedBarrier.await(); } catch (InterruptedException | BrokenBarrierException e) { throw new IllegalStateException("The startedBarrier failed.", e); } }), new ConstructionHeuristicPhaseConfig(), new LocalSearchPhaseConfig()); solverManager = SolverManager.create( solverConfig, new SolverManagerConfig().withParallelSolverCount("1")); SolverJob<TestdataSolution, Long> solverJob1 = solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1", 4)); SolverJob<TestdataSolution, Long> solverJob2 = solverManager.solve(2L, PlannerTestUtils.generateTestdataSolution("s2", 4)); SolverJob<TestdataSolution, Long> solverJob3 = solverManager.solve(3L, PlannerTestUtils.generateTestdataSolution("s3", 4)); // Give solver 1 enough time to start startedBarrier.await(); 
assertThat(solverManager.getSolverStatus(1L)).isEqualTo(SOLVING_ACTIVE); assertThat(solverJob1.getSolverStatus()).isEqualTo(SOLVING_ACTIVE); assertThat(solverManager.getSolverStatus(2L)).isEqualTo(SOLVING_SCHEDULED); assertThat(solverJob2.getSolverStatus()).isEqualTo(SOLVING_SCHEDULED); assertThat(solverManager.getSolverStatus(3L)).isEqualTo(SOLVING_SCHEDULED); assertThat(solverJob3.getSolverStatus()).isEqualTo(SOLVING_SCHEDULED); // Terminate solver 2 before it begins solverManager.terminateEarly(2L); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(SOLVING_ACTIVE); assertThat(solverJob1.getSolverStatus()).isEqualTo(SOLVING_ACTIVE); assertThat(solverManager.getSolverStatus(2L)).isEqualTo(NOT_SOLVING); assertThat(solverJob2.getSolverStatus()).isEqualTo(NOT_SOLVING); assertThat(solverManager.getSolverStatus(3L)).isEqualTo(SOLVING_SCHEDULED); assertThat(solverJob3.getSolverStatus()).isEqualTo(SOLVING_SCHEDULED); // Terminate solver 1 while it is running, allowing solver 3 to start solverManager.terminateEarly(1L); assertThat(solverManager.getSolverStatus(1L)).isEqualTo(NOT_SOLVING); assertThat(solverJob1.getSolverStatus()).isEqualTo(NOT_SOLVING); // Give solver 3 enough time to start startedBarrier.await(); assertThat(solverManager.getSolverStatus(3L)).isEqualTo(SOLVING_ACTIVE); assertThat(solverJob3.getSolverStatus()).isEqualTo(SOLVING_ACTIVE); // Terminate solver 3 while it is running solverManager.terminateEarly(3L); assertThat(solverManager.getSolverStatus(3L)).isEqualTo(NOT_SOLVING); assertThat(solverJob3.getSolverStatus()).isEqualTo(NOT_SOLVING); } /** * Tests whether SolverManager can solve on multiple threads problems that use multiple thread counts. 
*/ @Disabled("https://issues.redhat.com/browse/PLANNER-1837") @Test @Timeout(60) void solveMultipleThreadedMovesWithSolverManager_allGetSolved() throws ExecutionException, InterruptedException { int processCount = Runtime.getRuntime().availableProcessors(); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(new ConstructionHeuristicPhaseConfig(), new LocalSearchPhaseConfig()) // .withTerminationConfig(new TerminationConfig().withSecondsSpentLimit(4L)) // Adds moveThreadCount to the solver config. .withMoveThreadCount("AUTO"); // Creates solverManagerConfig with multiple threads. solverManager = SolverManager.create(solverConfig, new SolverManagerConfig()); List<SolverJob<TestdataSolution, Long>> jobs = new ArrayList<>(); for (long i = 0; i < processCount; i++) { jobs.add(solverManager.solve(i, PlannerTestUtils.generateTestdataSolution("s" + i, 10))); } assertInitializedJobs(jobs); } private void assertInitializedJobs(List<SolverJob<TestdataSolution, Long>> jobs) throws InterruptedException, ExecutionException { for (SolverJob<TestdataSolution, Long> job : jobs) { // Method getFinalBestSolution() waits for the solving to finish, therefore it ensures synchronization. assertSolutionInitialized(job.getFinalBestSolution()); } } @Test @Timeout(60) void submitMoreProblemsThanCpus_allGetSolved() throws InterruptedException, ExecutionException { // Use twice the amount of problems than available processors. 
int problemCount = Runtime.getRuntime().availableProcessors() * 2; solverManager = createSolverManagerTestableByDifferentConsumers(); assertSolveWithoutConsumer(problemCount, solverManager); assertSolveWithConsumer(problemCount, solverManager, true); assertSolveWithConsumer(problemCount, solverManager, false); } private SolverManager<TestdataSolution, Long> createSolverManagerTestableByDifferentConsumers() { List<PhaseConfig> phaseConfigList = IntStream.of(0, 1) .mapToObj((x) -> new CustomPhaseConfig().withCustomPhaseCommands( (ScoreDirector<TestdataSolution> scoreDirector) -> { TestdataSolution solution = scoreDirector.getWorkingSolution(); TestdataEntity entity = solution.getEntityList().get(x); scoreDirector.beforeVariableChanged(entity, "value"); entity.setValue(solution.getValueList().get(x)); scoreDirector.afterVariableChanged(entity, "value"); scoreDirector.triggerVariableListeners(); })) .collect(Collectors.toList()); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(phaseConfigList.toArray(new PhaseConfig[0])); SolverManagerConfig solverManagerConfig = new SolverManagerConfig(); return SolverManager.create(solverConfig, solverManagerConfig); } private void assertSolveWithoutConsumer(int problemCount, SolverManager<TestdataSolution, Long> solverManager) throws InterruptedException, ExecutionException { List<SolverJob<TestdataSolution, Long>> jobs = new ArrayList<>(problemCount); for (long id = 0; id < problemCount; id++) { jobs.add(solverManager.solve(id, PlannerTestUtils.generateTestdataSolution(String.format("s%d", id)))); } assertInitializedJobs(jobs); } private void assertSolveWithConsumer( int problemCount, SolverManager<TestdataSolution, Long> solverManager, boolean listenWhileSolving) throws ExecutionException, InterruptedException { // Two solutions should be created for every problem. 
Map<Long, List<TestdataSolution>> solutionMap = new HashMap<>(problemCount * 2); CountDownLatch finalBestSolutionConsumed = new CountDownLatch(problemCount); List<SolverJob<TestdataSolution, Long>> jobs = new ArrayList<>(problemCount); for (long id = 0; id < problemCount; id++) { List<TestdataSolution> consumedBestSolutions = Collections.synchronizedList(new ArrayList<>()); String solutionName = String.format("s%d", id); if (listenWhileSolving) { jobs.add(solverManager.solveAndListen( id, problemId -> PlannerTestUtils.generateTestdataSolution(solutionName, 2), consumedBestSolutions::add, (finalBestSolution) -> { finalBestSolutionConsumed.countDown(); }, null)); } else { jobs.add(solverManager.solve( id, problemId -> PlannerTestUtils.generateTestdataSolution(solutionName, 2), (finalBestSolution) -> { consumedBestSolutions.add(finalBestSolution); finalBestSolutionConsumed.countDown(); }, null)); } solutionMap.put(id, consumedBestSolutions); } assertInitializedJobs(jobs); finalBestSolutionConsumed.await(); // Wait till all final best solutions have been consumed. if (listenWhileSolving) { assertConsumedSolutionsWithListeningWhileSolving(solutionMap); } else { assertConsumedSolutions(solutionMap); } } private void assertConsumedSolutions(Map<Long, List<TestdataSolution>> consumedSolutions) { for (List<TestdataSolution> consumedSolution : consumedSolutions.values()) { assertThat(consumedSolution).hasSize(1); assertConsumedFinalBestSolution(consumedSolution.get(0)); } } private void assertConsumedSolutionsWithListeningWhileSolving(Map<Long, List<TestdataSolution>> consumedSolutions) { consumedSolutions.forEach((problemId, bestSolutions) -> { if (bestSolutions.size() == 2) { assertConsumedFirstBestSolution(bestSolutions.get(0)); assertConsumedFinalBestSolution(bestSolutions.get(1)); } else if (bestSolutions.size() == 1) { // The fist best solution has been skipped. 
assertConsumedFinalBestSolution(bestSolutions.get(0)); } else { fail("Unexpected number of received best solutions (" + bestSolutions.size() + "). Should be either 1 or 2."); } }); } private void assertConsumedFinalBestSolution(TestdataSolution solution) { TestdataEntity entity = solution.getEntityList().get(0); assertThat(entity.getCode()).isEqualTo("e1"); assertThat(entity.getValue().getCode()).isEqualTo("v1"); entity = solution.getEntityList().get(1); assertThat(entity.getCode()).isEqualTo("e2"); assertThat(entity.getValue().getCode()).isEqualTo("v2"); } private void assertConsumedFirstBestSolution(TestdataSolution solution) { TestdataEntity entity = solution.getEntityList().get(0); assertThat(entity.getCode()).isEqualTo("e1"); assertThat(entity.getValue().getCode()).isEqualTo("v1"); entity = solution.getEntityList().get(1); assertThat(entity.getCode()).isEqualTo("e2"); assertThat(entity.getValue()).isNull(); } @Test @Timeout(60) void runSameIdProcesses_throwsIllegalStateException() { SolverManagerConfig solverManagerConfig = new SolverManagerConfig(); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(createPhaseWithConcurrentSolvingStart(2)); solverManager = SolverManager.create(solverConfig, solverManagerConfig); solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1")); assertThatThrownBy(() -> solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1"))) .isInstanceOf(IllegalStateException.class).hasMessageContaining("already solving"); } @Test @Timeout(60) void addProblemChange() throws InterruptedException, ExecutionException { SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class); solverConfig.setDaemon(true); solverManager = SolverManager.create(solverConfig); final long problemId = 1L; final int entityAndValueCount = 4; AtomicReference<TestdataSolution> bestSolution = new AtomicReference<>(); 
solverManager.solveAndListen(problemId, id -> PlannerTestUtils.generateTestdataSolution("s1", entityAndValueCount), bestSolution::set); CompletableFuture<Void> futureChange = solverManager .addProblemChange(problemId, (workingSolution, problemChangeDirector) -> { problemChangeDirector.addProblemFact(new TestdataValue("addedValue"), workingSolution.getValueList()::add); }); futureChange.get(); assertThat(futureChange).isCompleted(); assertThat(bestSolution.get().getValueList()).hasSize(entityAndValueCount + 1); } @Test @Timeout(60) void addProblemChangeToNonExistingProblem_failsFast() { SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class); solverManager = SolverManager.create(solverConfig); solverManager.solveAndListen(1L, id -> PlannerTestUtils.generateTestdataSolution("s1", 4), testdataSolution -> { }); final long nonExistingProblemId = 999L; assertThatIllegalStateException() .isThrownBy(() -> solverManager.addProblemChange(nonExistingProblemId, (workingSolution, problemChangeDirector) -> problemChangeDirector.addProblemFact( new TestdataValue("addedValue"), workingSolution.getValueList()::add))) .withMessageContaining(String.valueOf(nonExistingProblemId)); } @Test @Timeout(60) void addProblemChangeToWaitingSolver() throws InterruptedException, ExecutionException { CountDownLatch solvingPausedLatch = new CountDownLatch(1); PhaseConfig<?> pausedPhaseConfig = new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> { try { solvingPausedLatch.await(); } catch (InterruptedException e) { fail("CountDownLatch failed."); } }); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(pausedPhaseConfig, new ConstructionHeuristicPhaseConfig()); // Allow only a single active solver. 
SolverManagerConfig solverManagerConfig = new SolverManagerConfig().withParallelSolverCount("1"); solverManager = SolverManager.create(solverConfig, solverManagerConfig); // The first solver waits until the test sends a problem change. solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1", 4)); // The second solver is scheduled and waits for the fist solver to finish. final long secondProblemId = 2L; final int entityAndValueCount = 4; AtomicReference<TestdataSolution> bestSolution = new AtomicReference<>(); solverManager.solveAndListen(secondProblemId, id -> PlannerTestUtils.generateTestdataSolution("s2", entityAndValueCount), bestSolution::set); CompletableFuture<Void> futureChange = solverManager .addProblemChange(secondProblemId, (workingSolution, problemChangeDirector) -> { problemChangeDirector.addProblemFact(new TestdataValue("addedValue"), workingSolution.getValueList()::add); }); // The first solver can proceed. When it finishes, the second solver starts solving and picks up the change. 
solvingPausedLatch.countDown(); futureChange.get(); assertThat(futureChange).isCompleted(); assertThat(bestSolution.get().getValueList()).hasSize(entityAndValueCount + 1); } @Test @Timeout(60) void terminateSolverJobEarly_stillReturnsBestSolution() throws ExecutionException, InterruptedException { SolverManagerConfig solverManagerConfig = new SolverManagerConfig(); CountDownLatch solvingStartedLatch = new CountDownLatch(1); PhaseConfig<?> pausedPhaseConfig = new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> solvingStartedLatch.countDown()); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(pausedPhaseConfig, new ConstructionHeuristicPhaseConfig()); solverManager = SolverManager.create(solverConfig, solverManagerConfig); SolverJob<TestdataSolution, Long> solverJob = solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1")); solvingStartedLatch.await(); solverJob.terminateEarly(); TestdataSolution result = solverJob.getFinalBestSolution(); assertThat(result).isNotNull(); assertThat(solverJob.isTerminatedEarly()).isTrue(); } @Test @Timeout(60) void terminateScheduledSolverJobEarly_returnsInputProblem() throws ExecutionException, InterruptedException { CountDownLatch solvingPausedLatch = new CountDownLatch(1); PhaseConfig<?> pausedPhaseConfig = new CustomPhaseConfig().withCustomPhaseCommands( scoreDirector -> { try { solvingPausedLatch.await(); } catch (InterruptedException e) { fail("CountDownLatch failed."); } }); SolverConfig solverConfig = PlannerTestUtils.buildSolverConfig(TestdataSolution.class, TestdataEntity.class) .withPhases(pausedPhaseConfig, new ConstructionHeuristicPhaseConfig()); // Allow only a single active solver. SolverManagerConfig solverManagerConfig = new SolverManagerConfig().withParallelSolverCount("1"); solverManager = SolverManager.create(solverConfig, solverManagerConfig); // The first solver waits. 
solverManager.solve(1L, PlannerTestUtils.generateTestdataSolution("s1", 4)); TestdataSolution inputProblem = PlannerTestUtils.generateTestdataSolution("s2", 4); SolverJob<TestdataSolution, Long> solverJob = solverManager.solve(2L, inputProblem); solverJob.terminateEarly(); TestdataSolution result = solverJob.getFinalBestSolution(); assertThat(result).isSameAs(inputProblem); assertThat(solverJob.isTerminatedEarly()).isTrue(); } }
googleapis/google-cloud-java
35,468
java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/SecurityBulletin.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/vulnerability.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1; /** * * * <pre> * SecurityBulletin are notifications of vulnerabilities of Google products. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.SecurityBulletin} */ public final class SecurityBulletin extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.SecurityBulletin) SecurityBulletinOrBuilder { private static final long serialVersionUID = 0L; // Use SecurityBulletin.newBuilder() to construct. 
private SecurityBulletin(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SecurityBulletin() { bulletinId_ = ""; suggestedUpgradeVersion_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SecurityBulletin(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_SecurityBulletin_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_SecurityBulletin_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.SecurityBulletin.class, com.google.cloud.securitycenter.v1.SecurityBulletin.Builder.class); } private int bitField0_; public static final int BULLETIN_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object bulletinId_ = ""; /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bulletinId. */ @java.lang.Override public java.lang.String getBulletinId() { java.lang.Object ref = bulletinId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bulletinId_ = s; return s; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bytes for bulletinId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBulletinIdBytes() { java.lang.Object ref = bulletinId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bulletinId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SUBMISSION_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp submissionTime_; /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return Whether the submissionTime field is set. */ @java.lang.Override public boolean hasSubmissionTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return The submissionTime. */ @java.lang.Override public com.google.protobuf.Timestamp getSubmissionTime() { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getSubmissionTimeOrBuilder() { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } public static final int SUGGESTED_UPGRADE_VERSION_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object suggestedUpgradeVersion_ = ""; /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The suggestedUpgradeVersion. 
*/ @java.lang.Override public java.lang.String getSuggestedUpgradeVersion() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); suggestedUpgradeVersion_ = s; return s; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The bytes for suggestedUpgradeVersion. */ @java.lang.Override public com.google.protobuf.ByteString getSuggestedUpgradeVersionBytes() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); suggestedUpgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bulletinId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulletinId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getSubmissionTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(suggestedUpgradeVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, suggestedUpgradeVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bulletinId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulletinId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSubmissionTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(suggestedUpgradeVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, suggestedUpgradeVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1.SecurityBulletin)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.SecurityBulletin other = (com.google.cloud.securitycenter.v1.SecurityBulletin) obj; if (!getBulletinId().equals(other.getBulletinId())) return false; if (hasSubmissionTime() != other.hasSubmissionTime()) return false; if (hasSubmissionTime()) { if (!getSubmissionTime().equals(other.getSubmissionTime())) return false; } if (!getSuggestedUpgradeVersion().equals(other.getSuggestedUpgradeVersion())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BULLETIN_ID_FIELD_NUMBER; hash = (53 * hash) + getBulletinId().hashCode(); if (hasSubmissionTime()) { hash = (37 * hash) + SUBMISSION_TIME_FIELD_NUMBER; hash = (53 * hash) + getSubmissionTime().hashCode(); } hash = (37 * hash) + SUGGESTED_UPGRADE_VERSION_FIELD_NUMBER; hash = (53 * hash) + getSuggestedUpgradeVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( 
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.SecurityBulletin parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.securitycenter.v1.SecurityBulletin prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * SecurityBulletin are notifications of vulnerabilities of Google products. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.SecurityBulletin} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.SecurityBulletin) com.google.cloud.securitycenter.v1.SecurityBulletinOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_SecurityBulletin_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_SecurityBulletin_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.SecurityBulletin.class, com.google.cloud.securitycenter.v1.SecurityBulletin.Builder.class); } // Construct using com.google.cloud.securitycenter.v1.SecurityBulletin.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSubmissionTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; bulletinId_ = ""; submissionTime_ = null; if (submissionTimeBuilder_ != null) { submissionTimeBuilder_.dispose(); submissionTimeBuilder_ = null; } suggestedUpgradeVersion_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_SecurityBulletin_descriptor; } @java.lang.Override public 
com.google.cloud.securitycenter.v1.SecurityBulletin getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1.SecurityBulletin.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1.SecurityBulletin build() { com.google.cloud.securitycenter.v1.SecurityBulletin result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1.SecurityBulletin buildPartial() { com.google.cloud.securitycenter.v1.SecurityBulletin result = new com.google.cloud.securitycenter.v1.SecurityBulletin(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v1.SecurityBulletin result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.bulletinId_ = bulletinId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.submissionTime_ = submissionTimeBuilder_ == null ? 
submissionTime_ : submissionTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.suggestedUpgradeVersion_ = suggestedUpgradeVersion_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1.SecurityBulletin) { return mergeFrom((com.google.cloud.securitycenter.v1.SecurityBulletin) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v1.SecurityBulletin other) { if (other == com.google.cloud.securitycenter.v1.SecurityBulletin.getDefaultInstance()) return this; if (!other.getBulletinId().isEmpty()) { bulletinId_ = other.bulletinId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasSubmissionTime()) { mergeSubmissionTime(other.getSubmissionTime()); } if (!other.getSuggestedUpgradeVersion().isEmpty()) { suggestedUpgradeVersion_ = other.suggestedUpgradeVersion_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); 
onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { bulletinId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getSubmissionTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { suggestedUpgradeVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object bulletinId_ = ""; /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bulletinId. */ public java.lang.String getBulletinId() { java.lang.Object ref = bulletinId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bulletinId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bytes for bulletinId. 
*/ public com.google.protobuf.ByteString getBulletinIdBytes() { java.lang.Object ref = bulletinId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bulletinId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @param value The bulletinId to set. * @return This builder for chaining. */ public Builder setBulletinId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bulletinId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return This builder for chaining. */ public Builder clearBulletinId() { bulletinId_ = getDefaultInstance().getBulletinId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @param value The bytes for bulletinId to set. * @return This builder for chaining. */ public Builder setBulletinIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bulletinId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.Timestamp submissionTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> submissionTimeBuilder_; /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return Whether the submissionTime field is set. 
*/ public boolean hasSubmissionTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return The submissionTime. */ public com.google.protobuf.Timestamp getSubmissionTime() { if (submissionTimeBuilder_ == null) { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } else { return submissionTimeBuilder_.getMessage(); } } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder setSubmissionTime(com.google.protobuf.Timestamp value) { if (submissionTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } submissionTime_ = value; } else { submissionTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder setSubmissionTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (submissionTimeBuilder_ == null) { submissionTime_ = builderForValue.build(); } else { submissionTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. 
* </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder mergeSubmissionTime(com.google.protobuf.Timestamp value) { if (submissionTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && submissionTime_ != null && submissionTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getSubmissionTimeBuilder().mergeFrom(value); } else { submissionTime_ = value; } } else { submissionTimeBuilder_.mergeFrom(value); } if (submissionTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder clearSubmissionTime() { bitField0_ = (bitField0_ & ~0x00000002); submissionTime_ = null; if (submissionTimeBuilder_ != null) { submissionTimeBuilder_.dispose(); submissionTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getSubmissionTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSubmissionTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getSubmissionTimeOrBuilder() { if (submissionTimeBuilder_ != null) { return submissionTimeBuilder_.getMessageOrBuilder(); } else { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } } /** * * * <pre> * Submission time of this Security Bulletin. 
* </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getSubmissionTimeFieldBuilder() { if (submissionTimeBuilder_ == null) { submissionTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getSubmissionTime(), getParentForChildren(), isClean()); submissionTime_ = null; } return submissionTimeBuilder_; } private java.lang.Object suggestedUpgradeVersion_ = ""; /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The suggestedUpgradeVersion. */ public java.lang.String getSuggestedUpgradeVersion() { java.lang.Object ref = suggestedUpgradeVersion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); suggestedUpgradeVersion_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The bytes for suggestedUpgradeVersion. 
*/ public com.google.protobuf.ByteString getSuggestedUpgradeVersionBytes() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); suggestedUpgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @param value The suggestedUpgradeVersion to set. * @return This builder for chaining. */ public Builder setSuggestedUpgradeVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } suggestedUpgradeVersion_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return This builder for chaining. */ public Builder clearSuggestedUpgradeVersion() { suggestedUpgradeVersion_ = getDefaultInstance().getSuggestedUpgradeVersion(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @param value The bytes for suggestedUpgradeVersion to set. * @return This builder for chaining. 
*/ public Builder setSuggestedUpgradeVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); suggestedUpgradeVersion_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.SecurityBulletin) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.SecurityBulletin) private static final com.google.cloud.securitycenter.v1.SecurityBulletin DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.SecurityBulletin(); } public static com.google.cloud.securitycenter.v1.SecurityBulletin getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SecurityBulletin> PARSER = new com.google.protobuf.AbstractParser<SecurityBulletin>() { @java.lang.Override public SecurityBulletin parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SecurityBulletin> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SecurityBulletin> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1.SecurityBulletin getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,468
java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/SecurityBulletin.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v2/vulnerability.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v2; /** * * * <pre> * SecurityBulletin are notifications of vulnerabilities of Google products. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.SecurityBulletin} */ public final class SecurityBulletin extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.SecurityBulletin) SecurityBulletinOrBuilder { private static final long serialVersionUID = 0L; // Use SecurityBulletin.newBuilder() to construct. 
private SecurityBulletin(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SecurityBulletin() { bulletinId_ = ""; suggestedUpgradeVersion_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SecurityBulletin(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_SecurityBulletin_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_SecurityBulletin_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.SecurityBulletin.class, com.google.cloud.securitycenter.v2.SecurityBulletin.Builder.class); } private int bitField0_; public static final int BULLETIN_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object bulletinId_ = ""; /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bulletinId. */ @java.lang.Override public java.lang.String getBulletinId() { java.lang.Object ref = bulletinId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bulletinId_ = s; return s; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bytes for bulletinId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBulletinIdBytes() { java.lang.Object ref = bulletinId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bulletinId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SUBMISSION_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp submissionTime_; /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return Whether the submissionTime field is set. */ @java.lang.Override public boolean hasSubmissionTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return The submissionTime. */ @java.lang.Override public com.google.protobuf.Timestamp getSubmissionTime() { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getSubmissionTimeOrBuilder() { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } public static final int SUGGESTED_UPGRADE_VERSION_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object suggestedUpgradeVersion_ = ""; /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The suggestedUpgradeVersion. 
*/ @java.lang.Override public java.lang.String getSuggestedUpgradeVersion() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); suggestedUpgradeVersion_ = s; return s; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The bytes for suggestedUpgradeVersion. */ @java.lang.Override public com.google.protobuf.ByteString getSuggestedUpgradeVersionBytes() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); suggestedUpgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bulletinId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulletinId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getSubmissionTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(suggestedUpgradeVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, suggestedUpgradeVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bulletinId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulletinId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSubmissionTime()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(suggestedUpgradeVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, suggestedUpgradeVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v2.SecurityBulletin)) { return super.equals(obj); } com.google.cloud.securitycenter.v2.SecurityBulletin other = (com.google.cloud.securitycenter.v2.SecurityBulletin) obj; if (!getBulletinId().equals(other.getBulletinId())) return false; if (hasSubmissionTime() != other.hasSubmissionTime()) return false; if (hasSubmissionTime()) { if (!getSubmissionTime().equals(other.getSubmissionTime())) return false; } if (!getSuggestedUpgradeVersion().equals(other.getSuggestedUpgradeVersion())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BULLETIN_ID_FIELD_NUMBER; hash = (53 * hash) + getBulletinId().hashCode(); if (hasSubmissionTime()) { hash = (37 * hash) + SUBMISSION_TIME_FIELD_NUMBER; hash = (53 * hash) + getSubmissionTime().hashCode(); } hash = (37 * hash) + SUGGESTED_UPGRADE_VERSION_FIELD_NUMBER; hash = (53 * hash) + getSuggestedUpgradeVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( 
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.SecurityBulletin parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.securitycenter.v2.SecurityBulletin prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * SecurityBulletin are notifications of vulnerabilities of Google products. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.SecurityBulletin} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.SecurityBulletin) com.google.cloud.securitycenter.v2.SecurityBulletinOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_SecurityBulletin_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_SecurityBulletin_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.SecurityBulletin.class, com.google.cloud.securitycenter.v2.SecurityBulletin.Builder.class); } // Construct using com.google.cloud.securitycenter.v2.SecurityBulletin.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSubmissionTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; bulletinId_ = ""; submissionTime_ = null; if (submissionTimeBuilder_ != null) { submissionTimeBuilder_.dispose(); submissionTimeBuilder_ = null; } suggestedUpgradeVersion_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_SecurityBulletin_descriptor; } @java.lang.Override public 
com.google.cloud.securitycenter.v2.SecurityBulletin getDefaultInstanceForType() { return com.google.cloud.securitycenter.v2.SecurityBulletin.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v2.SecurityBulletin build() { com.google.cloud.securitycenter.v2.SecurityBulletin result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v2.SecurityBulletin buildPartial() { com.google.cloud.securitycenter.v2.SecurityBulletin result = new com.google.cloud.securitycenter.v2.SecurityBulletin(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v2.SecurityBulletin result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.bulletinId_ = bulletinId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.submissionTime_ = submissionTimeBuilder_ == null ? 
submissionTime_ : submissionTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.suggestedUpgradeVersion_ = suggestedUpgradeVersion_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v2.SecurityBulletin) { return mergeFrom((com.google.cloud.securitycenter.v2.SecurityBulletin) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v2.SecurityBulletin other) { if (other == com.google.cloud.securitycenter.v2.SecurityBulletin.getDefaultInstance()) return this; if (!other.getBulletinId().isEmpty()) { bulletinId_ = other.bulletinId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasSubmissionTime()) { mergeSubmissionTime(other.getSubmissionTime()); } if (!other.getSuggestedUpgradeVersion().isEmpty()) { suggestedUpgradeVersion_ = other.suggestedUpgradeVersion_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); 
onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { bulletinId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getSubmissionTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { suggestedUpgradeVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object bulletinId_ = ""; /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bulletinId. */ public java.lang.String getBulletinId() { java.lang.Object ref = bulletinId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bulletinId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return The bytes for bulletinId. 
*/ public com.google.protobuf.ByteString getBulletinIdBytes() { java.lang.Object ref = bulletinId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bulletinId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @param value The bulletinId to set. * @return This builder for chaining. */ public Builder setBulletinId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bulletinId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @return This builder for chaining. */ public Builder clearBulletinId() { bulletinId_ = getDefaultInstance().getBulletinId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * ID of the bulletin corresponding to the vulnerability. * </pre> * * <code>string bulletin_id = 1;</code> * * @param value The bytes for bulletinId to set. * @return This builder for chaining. */ public Builder setBulletinIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bulletinId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.Timestamp submissionTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> submissionTimeBuilder_; /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return Whether the submissionTime field is set. 
*/ public boolean hasSubmissionTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> * * @return The submissionTime. */ public com.google.protobuf.Timestamp getSubmissionTime() { if (submissionTimeBuilder_ == null) { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } else { return submissionTimeBuilder_.getMessage(); } } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder setSubmissionTime(com.google.protobuf.Timestamp value) { if (submissionTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } submissionTime_ = value; } else { submissionTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder setSubmissionTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (submissionTimeBuilder_ == null) { submissionTime_ = builderForValue.build(); } else { submissionTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. 
* </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder mergeSubmissionTime(com.google.protobuf.Timestamp value) { if (submissionTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && submissionTime_ != null && submissionTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getSubmissionTimeBuilder().mergeFrom(value); } else { submissionTime_ = value; } } else { submissionTimeBuilder_.mergeFrom(value); } if (submissionTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public Builder clearSubmissionTime() { bitField0_ = (bitField0_ & ~0x00000002); submissionTime_ = null; if (submissionTimeBuilder_ != null) { submissionTimeBuilder_.dispose(); submissionTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getSubmissionTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSubmissionTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Submission time of this Security Bulletin. * </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getSubmissionTimeOrBuilder() { if (submissionTimeBuilder_ != null) { return submissionTimeBuilder_.getMessageOrBuilder(); } else { return submissionTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : submissionTime_; } } /** * * * <pre> * Submission time of this Security Bulletin. 
* </pre> * * <code>.google.protobuf.Timestamp submission_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getSubmissionTimeFieldBuilder() { if (submissionTimeBuilder_ == null) { submissionTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getSubmissionTime(), getParentForChildren(), isClean()); submissionTime_ = null; } return submissionTimeBuilder_; } private java.lang.Object suggestedUpgradeVersion_ = ""; /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The suggestedUpgradeVersion. */ public java.lang.String getSuggestedUpgradeVersion() { java.lang.Object ref = suggestedUpgradeVersion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); suggestedUpgradeVersion_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return The bytes for suggestedUpgradeVersion. 
*/ public com.google.protobuf.ByteString getSuggestedUpgradeVersionBytes() { java.lang.Object ref = suggestedUpgradeVersion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); suggestedUpgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @param value The suggestedUpgradeVersion to set. * @return This builder for chaining. */ public Builder setSuggestedUpgradeVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } suggestedUpgradeVersion_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @return This builder for chaining. */ public Builder clearSuggestedUpgradeVersion() { suggestedUpgradeVersion_ = getDefaultInstance().getSuggestedUpgradeVersion(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * This represents a version that the cluster receiving this notification * should be upgraded to, based on its current version. For example, 1.15.0 * </pre> * * <code>string suggested_upgrade_version = 3;</code> * * @param value The bytes for suggestedUpgradeVersion to set. * @return This builder for chaining. 
*/ public Builder setSuggestedUpgradeVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); suggestedUpgradeVersion_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.SecurityBulletin) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.SecurityBulletin) private static final com.google.cloud.securitycenter.v2.SecurityBulletin DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.SecurityBulletin(); } public static com.google.cloud.securitycenter.v2.SecurityBulletin getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SecurityBulletin> PARSER = new com.google.protobuf.AbstractParser<SecurityBulletin>() { @java.lang.Override public SecurityBulletin parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SecurityBulletin> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SecurityBulletin> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v2.SecurityBulletin getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/coherence
35,568
prj/coherence-core/src/main/java/com/tangosol/dev/assembler/Annotation.java
/*
 * Copyright (c) 2000, 2020, Oracle and/or its affiliates.
 *
 * Licensed under the Universal Permissive License v 1.0 as shown at
 * http://oss.oracle.com/licenses/upl.
 */

package com.tangosol.dev.assembler;

import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

/**
 * Represents a Java Virtual Machine Annotation structure in
 * "RuntimeVisibleAnnotations", "RuntimeInvisibleAnnotations",
 * "RuntimeVisibleParameterAnnotations",
 * "RuntimeInvisibleParameterAnnotations", and "AnnotationDefault"
 * attributes.
 *
 * <p>
 * The Annotation structure is defined by the JDK 1.5 documentation as:
 * <p>
 * <code><pre>
 * Annotation
 *     {
 *     u2 type_index;
 *     u2 num_element_value_pairs;
 *         {
 *         u2            element_name_index;
 *         element_value value;
 *         } element_value_pairs[num_element_value_pairs]
 *     }
 *
 * element_value
 *     {
 *     u1 tag;
 *     union
 *         {
 *         u2 const_value_index;
 *             {
 *             u2 type_name_index;
 *             u2 const_name_index;
 *             } enum_const_value;
 *         u2 class_info_index;
 *         annotation annotation_value;
 *             {
 *             u2 num_values;
 *             element_value values[num_values];
 *             } array_value;
 *         } value;
 *     }
 * </pre></code>
 *
 * @author rhl 2008.09.23
 */
public class Annotation extends VMStructure implements Constants {

    // ----- VMStructure operations -----------------------------------------

    /**
     * Read the annotation information from the stream.  Since constants can
     * be inter-related, the dependencies are not dereferenced until all
     * constants are disassembled; at that point, the constants are resolved
     * using the postdisassemble method.
     *
     * @param stream  the stream implementing java.io.DataInput from which
     *                to read the constant information
     * @param pool    the constant pool for the class which does not yet
     *                contain the constants referenced by this constant
     */
    protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
        // type_index: the annotation's type descriptor
        m_utfType = (UtfConstant) pool.getConstant(stream.readUnsignedShort());

        // num_element_value_pairs, followed by that many name/value pairs
        int cElementValue = stream.readUnsignedShort();
        for (int i = 0; i < cElementValue; i++) {
            UtfConstant utfElementName;
            AbstractElementValue elementValue;

            // element_name_index, then the tagged element_value payload
            utfElementName = (UtfConstant) pool.getConstant(stream.readUnsignedShort());
            elementValue = AbstractElementValue.loadElementValue(stream, pool);
            m_mapElementValue.put(utfElementName, elementValue);
        }
    }

    /**
     * The pre-assembly step collects the necessary entries for the constant
     * pool.  During this step, all constants used by this VM structure and
     * any sub-structures are registered with (but not yet bound by position
     * in) the constant pool.
     *
     * @param pool  the constant pool for the class which needs to be
     *              populated with the constants required to build this
     *              VM structure
     */
    protected void preassemble(ConstantPool pool) {
        pool.registerConstant(m_utfType);

        // register both the element names and the element values
        for (Iterator iter = m_mapElementValue.entrySet().iterator(); iter.hasNext();) {
            Map.Entry entry = (Map.Entry) iter.next();
            ((UtfConstant) entry.getKey()).preassemble(pool);
            ((AbstractElementValue) entry.getValue()).preassemble(pool);
        }
    }

    /**
     * The assembly process assembles and writes the constant to the passed
     * output stream.
     *
     * @param stream  the stream implementing java.io.DataOutput to which to
     *                write the assembled constant
     * @param pool    the constant pool for the class which by this point
     *                contains the entire set of constants required to build
     *                this VM structure
     */
    protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
        // write type_index, num_element_value_pairs, then each pair
        stream.writeShort(pool.findConstant(m_utfType));
        stream.writeShort(m_mapElementValue.size());
        for (Iterator iter = m_mapElementValue.entrySet().iterator(); iter.hasNext();) {
            Map.Entry entry = (Map.Entry) iter.next();
            stream.writeShort(pool.findConstant((UtfConstant) entry.getKey()));
            ((AbstractElementValue) entry.getValue()).assemble(stream, pool);
        }
    }

    /**
     * Determine if the attribute has been modified.
     *
     * @return true if the attribute has been modified
     */
    public boolean isModified() {
        // modified if any contained element value has been modified ...
        for (Iterator iter = m_mapElementValue.values().iterator(); iter.hasNext(); ) {
            if (((AbstractElementValue) iter.next()).isModified()) {
                return true;
            }
        }
        // ... or if this annotation itself has been mutated
        return m_fModified;
    }

    /**
     * Reset the modified state of the VM structure.
     * <p>
     * This method must be overridden by sub-classes which do not maintain
     * the attribute as binary.
     */
    protected void resetModified() {
        m_fModified = false;
    }

    // ----- AbstractAnnotation operations ----------------------------------

    /**
     * Get the assembled size in bytes of this annotation structure.
     */
    public int getSize() {
        int cBytes = 0;

        cBytes += 2; /* type_index */
        cBytes += 2; /* num_element_value_pairs */
        for (Iterator iter = m_mapElementValue.values().iterator(); iter.hasNext(); ) {
            cBytes += 2; /* element_name_index */
            cBytes += ((AbstractElementValue) iter.next()).getSize();
        }
        return cBytes;
    }

    // ----- accessors ------------------------------------------------------

    /**
     * Get the type of this annotation.
     */
    public UtfConstant getAnnotationType() {
        return m_utfType;
    }

    /**
     * Set the type of this annotation.
     */
    public void setAnnotationType(UtfConstant utfType) {
        m_utfType = utfType;
        m_fModified = true;
    }

    /**
     * Set an element value in this annotation structure.
     */
    public void setElementValue(UtfConstant utfElementName, AbstractElementValue elementValue) {
        m_mapElementValue.put(utfElementName, elementValue);
        m_fModified = true;
    }

    /**
     * Get the element value associated with the element name in this
     * annotation structure, or null if the element does not exist.
     */
    public AbstractElementValue getElementValue(UtfConstant utfElementName) {
        return (AbstractElementValue) m_mapElementValue.get(utfElementName);
    }

    /**
     * Get an Iterator of the element names in this annotation structure.
     */
    public Iterator getElementNames() {
        return m_mapElementValue.keySet().iterator();
    }

    /**
     * Clear the element values.
     */
    public void clearElementValues() {
        m_mapElementValue.clear();
        m_fModified = true;
    }

    // ----- data members ---------------------------------------------------

    /**
     * The name of this class.
     */
    private static final String CLASS = "Annotation";

    /**
     * The type of the annotation.
     */
    private UtfConstant m_utfType;

    /**
     * The element-values, keyed by element name (UtfConstant).
     */
    private HashMap m_mapElementValue = new HashMap();

    /**
     * Has the annotation been modified?
     */
    private boolean m_fModified;

    // ----- inner class: AbstractElementValue ------------------------------

    /**
     * Represents an element_value structure used by annotation-related
     * attributes.
     */
    public static abstract class AbstractElementValue extends VMStructure implements Constants {

        // ----- constructors -----------------------------------------------

        /**
         * Construct an AbstractElementValue with the given element_value tag.
         */
        protected AbstractElementValue(char cTag) {
            m_cTag = cTag;
        }

        /**
         * Read an element_value from the stream, instantiating and
         * disassembling the concrete subclass selected by the tag byte.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the element_value
         * @param pool    the constant pool for the class
         *
         * @return the disassembled element value
         */
        protected static AbstractElementValue loadElementValue(DataInput stream, ConstantPool pool) throws IOException {
            AbstractElementValue elementValue = null;
            char cTag = (char) stream.readByte();
            switch (cTag) {
                case TAGTYPE_BYTE:
                case TAGTYPE_CHAR:
                case TAGTYPE_DOUBLE:
                case TAGTYPE_FLOAT:
                case TAGTYPE_INT:
                case TAGTYPE_LONG:
                case TAGTYPE_SHORT:
                case TAGTYPE_BOOLEAN:
                case TAGTYPE_STRING: {
                    /* primitive or string value */
                    elementValue = new ConstantElementValue(cTag);
                    break;
                }
                case TAGTYPE_ENUM: {
                    /* enum value */
                    elementValue = new EnumElementValue();
                    break;
                }
                case TAGTYPE_CLASS: {
                    /* class value */
                    elementValue = new ClassElementValue();
                    break;
                }
                case TAGTYPE_ANNOTATION: {
                    /* annotation value */
                    elementValue = new AnnotationElementValue();
                    break;
                }
                case TAGTYPE_ARRAY: {
                    /* array value */
                    elementValue = new ArrayElementValue();
                    break;
                }
                default: {
                    throw new IllegalArgumentException(CLASS + ".loadElementValue: unknown ElementValue tag type " + cTag);
                }
            }
            elementValue.disassemble(stream, pool);
            return elementValue;
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            // subclasses write their payload after this one-byte tag
            stream.writeByte((byte) m_cTag);
        }

        // ----- accessors --------------------------------------------------

        /**
         * Determine if the attribute has been modified.
         *
         * @return true if the attribute has been modified
         */
        public boolean isModified() {
            return m_fModified;
        }

        /**
         * Reset the modified state of the VM structure.
         */
        protected void resetModified() {
            m_fModified = false;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            return 1; /* tag */
        }

        // ----- constants --------------------------------------------------

        /* byte element type */
        public static final char TAGTYPE_BYTE       = 'B';

        /* char element type */
        public static final char TAGTYPE_CHAR       = 'C';

        /* double element type */
        public static final char TAGTYPE_DOUBLE     = 'D';

        /* float element type */
        public static final char TAGTYPE_FLOAT      = 'F';

        /* int element type */
        public static final char TAGTYPE_INT        = 'I';

        /* long element type */
        public static final char TAGTYPE_LONG       = 'J';

        /* short element type */
        public static final char TAGTYPE_SHORT      = 'S';

        /* boolean element type */
        public static final char TAGTYPE_BOOLEAN    = 'Z';

        /* String element type */
        public static final char TAGTYPE_STRING     = 's';

        /* enum constant element type */
        public static final char TAGTYPE_ENUM       = 'e';

        /* class element type */
        public static final char TAGTYPE_CLASS      = 'c';

        /* annotation element type */
        public static final char TAGTYPE_ANNOTATION = '@';

        /* array element type */
        public static final char TAGTYPE_ARRAY      = '[';

        // ----- data members -----------------------------------------------

        /**
         * Tracks modification to this object.
         */
        protected boolean m_fModified;

        /**
         * The ElementValue type.
         */
        private char m_cTag;
    }

    /**
     * Represents a constant element value in an annotation structure.
     */
    public static class ConstantElementValue extends AbstractElementValue {

        // ----- constructors -----------------------------------------------

        /**
         * Construct a ConstantElementValue object.  Used during disassembly.
         */
        protected ConstantElementValue(char cType) {
            super(cType);
        }

        /**
         * Construct a ConstantElementValue object.
         */
        public ConstantElementValue(char cType, Constant constValue) {
            super(cType);
            m_constValue = constValue;
        }

        // ----- accessors --------------------------------------------------

        /**
         * Get the constant value.
         */
        public Constant getConstantValue() {
            return m_constValue;
        }

        /**
         * Set the constant value.
         */
        public void setConstantValue(Constant constValue) {
            m_constValue = constValue;
            m_fModified = true;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            return 2 + super.getSize(); /* const_value_index */
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The disassembly process reads the structure from the passed input
         * stream and uses the constant pool to dereference any constant
         * references.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the assembled VM structure
         * @param pool    the constant pool for the class which contains any
         *                constants referenced by this VM structure
         */
        protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
            m_constValue = (Constant) pool.getConstant(stream.readUnsignedShort());
        }

        /**
         * The pre-assembly step collects the necessary entries for the constant
         * pool.  During this step, all constants used by this VM structure and
         * any sub-structures are registered with (but not yet bound by position
         * in) the constant pool.
         *
         * @param pool  the constant pool for the class which needs to be
         *              populated with the constants required to build this
         *              VM structure
         */
        protected void preassemble(ConstantPool pool) {
            pool.registerConstant(m_constValue);
        }

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            super.assemble(stream, pool);
            stream.writeShort(pool.findConstant(m_constValue));
        }

        // ----- data members -----------------------------------------------

        /**
         * The constant value.
         */
        private Constant m_constValue;
    }

    /**
     * Represents a class element value in an annotation structure.
     */
    public static class ClassElementValue extends AbstractElementValue {

        // ----- constructors -----------------------------------------------

        /**
         * Construct a ClassElementValue object.  Used during disassembly.
         */
        protected ClassElementValue() {
            super(TAGTYPE_CLASS);
        }

        /**
         * Construct a ClassElementValue object.
         */
        public ClassElementValue(UtfConstant utfClassType) {
            super(TAGTYPE_CLASS);
            m_utfClassType = utfClassType;
        }

        // ----- accessors --------------------------------------------------

        /**
         * Get the class type.
         */
        public UtfConstant getClassType() {
            return m_utfClassType;
        }

        /**
         * Set the class type.
         */
        public void setClassType(UtfConstant utfClassType) {
            m_utfClassType = utfClassType;
            m_fModified = true;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            return 2 + super.getSize(); /* class_info_index */
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The disassembly process reads the structure from the passed input
         * stream and uses the constant pool to dereference any constant
         * references.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the assembled VM structure
         * @param pool    the constant pool for the class which contains any
         *                constants referenced by this VM structure
         */
        protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
            m_utfClassType = (UtfConstant) pool.getConstant(stream.readUnsignedShort());
        }

        /**
         * The pre-assembly step collects the necessary entries for the constant
         * pool.  During this step, all constants used by this VM structure and
         * any sub-structures are registered with (but not yet bound by position
         * in) the constant pool.
         *
         * @param pool  the constant pool for the class which needs to be
         *              populated with the constants required to build this
         *              VM structure
         */
        protected void preassemble(ConstantPool pool) {
            pool.registerConstant(m_utfClassType);
        }

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            super.assemble(stream, pool);
            stream.writeShort(pool.findConstant(m_utfClassType));
        }

        // ----- data members -----------------------------------------------

        /**
         * The class type.
         */
        private UtfConstant m_utfClassType;
    }

    /**
     * Represents an enum value in an annotation structure.
     */
    public static class EnumElementValue extends AbstractElementValue {

        // ----- constructors -----------------------------------------------

        /**
         * Construct an EnumElementValue object.  Used during disassembly.
         */
        protected EnumElementValue() {
            super(TAGTYPE_ENUM);
        }

        /**
         * Construct an EnumElementValue object.
         */
        public EnumElementValue(UtfConstant utfEnumName, UtfConstant utfEnumType) {
            super(TAGTYPE_ENUM);
            m_utfEnumName = utfEnumName;
            m_utfEnumType = utfEnumType;
        }

        // ----- accessors --------------------------------------------------

        /**
         * Get the enum name.
         */
        public UtfConstant getEnumName() {
            return m_utfEnumName;
        }

        /**
         * Set the enum name.
         */
        public void setEnumName(UtfConstant utfEnumName) {
            m_utfEnumName = utfEnumName;
            m_fModified = true;
        }

        /**
         * Get the enum type.
         */
        public UtfConstant getEnumType() {
            return m_utfEnumType;
        }

        /**
         * Set the enum type.
         */
        public void setEnumType(UtfConstant utfEnumType) {
            m_utfEnumType = utfEnumType;
            m_fModified = true;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            return 4 + super.getSize(); /* type_name_index, const_name_index */
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The disassembly process reads the structure from the passed input
         * stream and uses the constant pool to dereference any constant
         * references.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the assembled VM structure
         * @param pool    the constant pool for the class which contains any
         *                constants referenced by this VM structure
         */
        protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
            // type_name_index precedes const_name_index (see the
            // enum_const_value layout in the class javadoc)
            m_utfEnumType = (UtfConstant) pool.getConstant(stream.readUnsignedShort());
            m_utfEnumName = (UtfConstant) pool.getConstant(stream.readUnsignedShort());
        }

        /**
         * The pre-assembly step collects the necessary entries for the constant
         * pool.  During this step, all constants used by this VM structure and
         * any sub-structures are registered with (but not yet bound by position
         * in) the constant pool.
         *
         * @param pool  the constant pool for the class which needs to be
         *              populated with the constants required to build this
         *              VM structure
         */
        protected void preassemble(ConstantPool pool) {
            pool.registerConstant(m_utfEnumType);
            pool.registerConstant(m_utfEnumName);
        }

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            // write order mirrors disassemble: type first, then name
            super.assemble(stream, pool);
            stream.writeShort(pool.findConstant(m_utfEnumType));
            stream.writeShort(pool.findConstant(m_utfEnumName));
        }

        // ----- data members -----------------------------------------------

        /**
         * The enum name.
         */
        private UtfConstant m_utfEnumName;

        /**
         * The enum type.
         */
        private UtfConstant m_utfEnumType;
    }

    /**
     * Represents an annotation element value in an annotation structure.
     */
    public static class AnnotationElementValue extends AbstractElementValue {

        // ----- constructors -----------------------------------------------

        /**
         * Construct an AnnotationElementValue object.  Used during disassembly.
         */
        protected AnnotationElementValue() {
            super(TAGTYPE_ANNOTATION);
        }

        /**
         * Construct an AnnotationElementValue object.
         */
        public AnnotationElementValue(Annotation annotationValue) {
            super(TAGTYPE_ANNOTATION);
            m_annotationValue = annotationValue;
        }

        // ----- accessors --------------------------------------------------

        /**
         * Get the annotation.
         */
        public Annotation getAnnotation() {
            return m_annotationValue;
        }

        /**
         * Set the annotation.
         */
        public void setAnnotation(Annotation annotationValue) {
            m_annotationValue = annotationValue;
            m_fModified = true;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            // the nested annotation structure follows the one-byte tag
            return m_annotationValue.getSize() + super.getSize();
        }

        /**
         * Determine if the attribute has been modified.
         *
         * @return true if the attribute has been modified
         */
        public boolean isModified() {
            // modified if either this wrapper or the nested annotation changed
            return super.isModified() || m_annotationValue.isModified();
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The disassembly process reads the structure from the passed input
         * stream and uses the constant pool to dereference any constant
         * references.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the assembled VM structure
         * @param pool    the constant pool for the class which contains any
         *                constants referenced by this VM structure
         */
        protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
            m_annotationValue = new Annotation();
            m_annotationValue.disassemble(stream, pool);
        }

        /**
         * The pre-assembly step collects the necessary entries for the constant
         * pool.  During this step, all constants used by this VM structure and
         * any sub-structures are registered with (but not yet bound by position
         * in) the constant pool.
         *
         * @param pool  the constant pool for the class which needs to be
         *              populated with the constants required to build this
         *              VM structure
         */
        protected void preassemble(ConstantPool pool) {
            m_annotationValue.preassemble(pool);
        }

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            super.assemble(stream, pool);
            m_annotationValue.assemble(stream, pool);
        }

        // ----- data members -----------------------------------------------

        /**
         * The annotation value.
         */
        private Annotation m_annotationValue;
    }

    /**
     * Represents an array element value in an annotation structure.
     */
    public static class ArrayElementValue extends AbstractElementValue {

        // ----- constructors -----------------------------------------------

        /**
         * Construct an ArrayElementValue object.  Used during disassembly.
         */
        protected ArrayElementValue() {
            super(TAGTYPE_ARRAY);
        }

        /**
         * Construct an ArrayElementValue object.
         */
        public ArrayElementValue(List listElement) {
            super(TAGTYPE_ARRAY);
            // defensive copy of the caller's list
            m_listElement = new Vector(listElement);
        }

        // ----- accessors --------------------------------------------------

        /**
         * Get the list of elements.
         */
        public Iterator getElements() {
            return m_listElement.iterator();
        }

        /**
         * Add elementValue to the list of elements.
         */
        public void add(AbstractElementValue elementValue) {
            m_listElement.addElement(elementValue);
            m_fModified = true;
        }

        /**
         * Clear the list of elements.
         */
        public void clear() {
            m_listElement.clear();
            m_fModified = true;
        }

        /**
         * Set the list of elements.
         */
        public void setElements(List listElement) {
            m_listElement.clear();
            m_listElement.addAll(listElement);
            m_fModified = true;
        }

        /**
         * Get the assembled size in bytes of this element value structure.
         */
        protected int getSize() {
            int cBytes = super.getSize();

            cBytes += 2; /* num_values */
            for (Iterator iter = m_listElement.iterator(); iter.hasNext();) {
                cBytes += ((AbstractElementValue) iter.next()).getSize();
            }
            return cBytes;
        }

        /**
         * Determine if the attribute has been modified.
         *
         * @return true if the attribute has been modified
         */
        public boolean isModified() {
            // modified if any contained element value changed ...
            for (Iterator iter = m_listElement.iterator(); iter.hasNext();) {
                if (((AbstractElementValue) iter.next()).isModified()) {
                    return true;
                }
            }
            // ... or if the list itself was mutated
            return super.isModified();
        }

        // ----- VMStructure operations -------------------------------------

        /**
         * The disassembly process reads the structure from the passed input
         * stream and uses the constant pool to dereference any constant
         * references.
         *
         * @param stream  the stream implementing java.io.DataInput from which
         *                to read the assembled VM structure
         * @param pool    the constant pool for the class which contains any
         *                constants referenced by this VM structure
         */
        protected void disassemble(DataInput stream, ConstantPool pool) throws IOException {
            // num_values, followed by that many tagged element_values
            int cElement = stream.readUnsignedShort();
            for (int i = 0; i < cElement; i++) {
                AbstractElementValue elementValue = AbstractElementValue.loadElementValue(stream, pool);
                m_listElement.addElement(elementValue);
            }
        }

        /**
         * The pre-assembly step collects the necessary entries for the constant
         * pool.  During this step, all constants used by this VM structure and
         * any sub-structures are registered with (but not yet bound by position
         * in) the constant pool.
         *
         * @param pool  the constant pool for the class which needs to be
         *              populated with the constants required to build this
         *              VM structure
         */
        protected void preassemble(ConstantPool pool) {
            for (Iterator iter = m_listElement.iterator(); iter.hasNext();) {
                ((AbstractElementValue) iter.next()).preassemble(pool);
            }
        }

        /**
         * The assembly process assembles and writes the structure to the passed
         * output stream, resolving any dependencies using the passed constant
         * pool.
         *
         * @param stream  the stream implementing java.io.DataOutput to which to
         *                write the assembled VM structure
         * @param pool    the constant pool for the class which by this point
         *                contains the entire set of constants required to build
         *                this VM structure
         */
        protected void assemble(DataOutput stream, ConstantPool pool) throws IOException {
            super.assemble(stream, pool);
            stream.writeShort(m_listElement.size());
            for (Iterator iter = m_listElement.iterator(); iter.hasNext();) {
                ((AbstractElementValue) iter.next()).assemble(stream, pool);
            }
        }

        // ----- data members -----------------------------------------------

        /**
         * The list of element values.
         */
        private Vector m_listElement = new Vector();
    }
}
openjdk/jdk8
35,369
jaxws/src/share/jaxws_classes/com/sun/tools/internal/ws/resources/WsdlMessages.java
/* * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.tools.internal.ws.resources; import com.sun.istack.internal.localization.Localizable; import com.sun.istack.internal.localization.LocalizableMessageFactory; import com.sun.istack.internal.localization.Localizer; /** * Defines string formatting method for each constant in the resource file * */ public final class WsdlMessages { private final static LocalizableMessageFactory messageFactory = new LocalizableMessageFactory("com.sun.tools.internal.ws.resources.wsdl"); private final static Localizer localizer = new Localizer(); public static Localizable localizablePARSING_ELEMENT_EXPECTED() { return messageFactory.getMessage("parsing.elementExpected"); } /** * unexpected non-element found * */ public static String PARSING_ELEMENT_EXPECTED() { return localizer.localize(localizablePARSING_ELEMENT_EXPECTED()); } public static Localizable localizableENTITY_NOT_FOUND_BINDING(Object arg0, Object arg1) { return messageFactory.getMessage("entity.notFound.binding", arg0, arg1); } /** * wsdl:binding "{0}" referenced by wsdl:port "{1}", but it's not found in the wsdl * */ public static String ENTITY_NOT_FOUND_BINDING(Object arg0, Object arg1) { return localizer.localize(localizableENTITY_NOT_FOUND_BINDING(arg0, arg1)); } public static Localizable localizablePARSING_UNABLE_TO_GET_METADATA(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.unableToGetMetadata", arg0, arg1); } /** * {0} * * {1} * */ public static String PARSING_UNABLE_TO_GET_METADATA(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_UNABLE_TO_GET_METADATA(arg0, arg1)); } public static Localizable localizablePARSING_PARSE_FAILED() { return messageFactory.getMessage("Parsing.ParseFailed"); } /** * Failed to parse the WSDL. 
* */ public static String PARSING_PARSE_FAILED() { return localizer.localize(localizablePARSING_PARSE_FAILED()); } public static Localizable localizablePARSING_INVALID_ATTRIBUTE_VALUE(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.invalidAttributeValue", arg0, arg1); } /** * invalid value "{1}" for attribute "{0}" * */ public static String PARSING_INVALID_ATTRIBUTE_VALUE(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_INVALID_ATTRIBUTE_VALUE(arg0, arg1)); } public static Localizable localizableVALIDATION_INVALID_ATTRIBUTE_VALUE(Object arg0, Object arg1) { return messageFactory.getMessage("validation.invalidAttributeValue", arg0, arg1); } /** * invalid value "{1}" for attribute "{0}" * */ public static String VALIDATION_INVALID_ATTRIBUTE_VALUE(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_INVALID_ATTRIBUTE_VALUE(arg0, arg1)); } public static Localizable localizablePARSING_INVALID_TAG(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.invalidTag", arg0, arg1); } /** * expected element "{1}", found "{0}" * */ public static String PARSING_INVALID_TAG(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_INVALID_TAG(arg0, arg1)); } public static Localizable localizableENTITY_NOT_FOUND_PORT_TYPE(Object arg0, Object arg1) { return messageFactory.getMessage("entity.notFound.portType", arg0, arg1); } /** * wsdl:portType "{0}" referenced by wsdl:binding "{1}", but it's not found in the wsdl * */ public static String ENTITY_NOT_FOUND_PORT_TYPE(Object arg0, Object arg1) { return localizer.localize(localizableENTITY_NOT_FOUND_PORT_TYPE(arg0, arg1)); } public static Localizable localizablePARSING_MISSING_REQUIRED_ATTRIBUTE(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.missingRequiredAttribute", arg0, arg1); } /** * missing required attribute "{1}" of element "{0}" * */ public static String PARSING_MISSING_REQUIRED_ATTRIBUTE(Object arg0, 
Object arg1) { return localizer.localize(localizablePARSING_MISSING_REQUIRED_ATTRIBUTE(arg0, arg1)); } public static Localizable localizablePARSING_INVALID_ELEMENT(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.invalidElement", arg0, arg1); } /** * invalid element: "{0}" (in namespace "{1}") * */ public static String PARSING_INVALID_ELEMENT(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_INVALID_ELEMENT(arg0, arg1)); } public static Localizable localizableVALIDATION_INVALID_ELEMENT(Object arg0) { return messageFactory.getMessage("validation.invalidElement", arg0); } /** * invalid element: "{0}" * */ public static String VALIDATION_INVALID_ELEMENT(Object arg0) { return localizer.localize(localizableVALIDATION_INVALID_ELEMENT(arg0)); } public static Localizable localizableINTERNALIZER_TWO_VERSION_ATTRIBUTES() { return messageFactory.getMessage("Internalizer.TwoVersionAttributes"); } /** * Both jaxws:version and version are present * */ public static String INTERNALIZER_TWO_VERSION_ATTRIBUTES() { return localizer.localize(localizableINTERNALIZER_TWO_VERSION_ATTRIBUTES()); } public static Localizable localizableVALIDATION_DUPLICATE_PART_NAME(Object arg0, Object arg1) { return messageFactory.getMessage("validation.duplicatePartName", arg0, arg1); } /** * Invalid WSDL, duplicate parts in a wsdl:message is not allowed. 
* wsdl:message {0} has a duplicated part name: "{1}" * */ public static String VALIDATION_DUPLICATE_PART_NAME(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_DUPLICATE_PART_NAME(arg0, arg1)); } public static Localizable localizablePARSING_INVALID_WSDL_ELEMENT(Object arg0) { return messageFactory.getMessage("parsing.invalidWsdlElement", arg0); } /** * invalid WSDL element: "{0}" * */ public static String PARSING_INVALID_WSDL_ELEMENT(Object arg0) { return localizer.localize(localizablePARSING_INVALID_WSDL_ELEMENT(arg0)); } public static Localizable localizablePARSING_NON_WHITESPACE_TEXT_FOUND(Object arg0) { return messageFactory.getMessage("parsing.nonWhitespaceTextFound", arg0); } /** * found unexpected non-whitespace text: "{0}" * */ public static String PARSING_NON_WHITESPACE_TEXT_FOUND(Object arg0) { return localizer.localize(localizablePARSING_NON_WHITESPACE_TEXT_FOUND(arg0)); } public static Localizable localizableINTERNALIZER_TARGET_NOT_FOUND(Object arg0) { return messageFactory.getMessage("internalizer.targetNotFound", arg0); } /** * No target found for the wsdlLocation: {0} * */ public static String INTERNALIZER_TARGET_NOT_FOUND(Object arg0) { return localizer.localize(localizableINTERNALIZER_TARGET_NOT_FOUND(arg0)); } public static Localizable localizablePARSING_SAX_EXCEPTION_WITH_SYSTEM_ID(Object arg0) { return messageFactory.getMessage("parsing.saxExceptionWithSystemId", arg0); } /** * invalid WSDL file! 
failed to parse document at "{0}" * */ public static String PARSING_SAX_EXCEPTION_WITH_SYSTEM_ID(Object arg0) { return localizer.localize(localizablePARSING_SAX_EXCEPTION_WITH_SYSTEM_ID(arg0)); } public static Localizable localizablePARSING_REQUIRED_EXTENSIBILITY_ELEMENT(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.requiredExtensibilityElement", arg0, arg1); } /** * unknown required extensibility element "{0}" (in namespace "{1}") * */ public static String PARSING_REQUIRED_EXTENSIBILITY_ELEMENT(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_REQUIRED_EXTENSIBILITY_ELEMENT(arg0, arg1)); } public static Localizable localizableENTITY_NOT_FOUND_BY_ID(Object arg0) { return messageFactory.getMessage("entity.notFoundByID", arg0); } /** * invalid entity id: "{0}" * */ public static String ENTITY_NOT_FOUND_BY_ID(Object arg0) { return localizer.localize(localizableENTITY_NOT_FOUND_BY_ID(arg0)); } public static Localizable localizableVALIDATION_EXCLUSIVE_ATTRIBUTES(Object arg0, Object arg1) { return messageFactory.getMessage("validation.exclusiveAttributes", arg0, arg1); } /** * exclusive attributes: "{0}", "{1}" * */ public static String VALIDATION_EXCLUSIVE_ATTRIBUTES(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_EXCLUSIVE_ATTRIBUTES(arg0, arg1)); } public static Localizable localizableVALIDATION_MISSING_REQUIRED_SUB_ENTITY(Object arg0, Object arg1) { return messageFactory.getMessage("validation.missingRequiredSubEntity", arg0, arg1); } /** * missing required sub-entity "{0}" of element "{1}" * */ public static String VALIDATION_MISSING_REQUIRED_SUB_ENTITY(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_MISSING_REQUIRED_SUB_ENTITY(arg0, arg1)); } public static Localizable localizableINTERNALIZER_INCORRECT_VERSION() { return messageFactory.getMessage("Internalizer.IncorrectVersion"); } /** * JAXWS version attribute must be "2.0" * */ public static String 
INTERNALIZER_INCORRECT_VERSION() { return localizer.localize(localizableINTERNALIZER_INCORRECT_VERSION()); } public static Localizable localizableLOCALIZED_ERROR(Object arg0) { return messageFactory.getMessage("localized.error", arg0); } /** * {0} * */ public static String LOCALIZED_ERROR(Object arg0) { return localizer.localize(localizableLOCALIZED_ERROR(arg0)); } public static Localizable localizableENTITY_DUPLICATE_WITH_TYPE(Object arg0, Object arg1) { return messageFactory.getMessage("entity.duplicateWithType", arg0, arg1); } /** * duplicate "{0}" entity: "{1}" * */ public static String ENTITY_DUPLICATE_WITH_TYPE(Object arg0, Object arg1) { return localizer.localize(localizableENTITY_DUPLICATE_WITH_TYPE(arg0, arg1)); } public static Localizable localizablePARSING_ONLY_ONE_OF_ELEMENT_OR_TYPE_REQUIRED(Object arg0) { return messageFactory.getMessage("parsing.onlyOneOfElementOrTypeRequired", arg0); } /** * only one of the "element" or "type" attributes is allowed in part "{0}" * */ public static String PARSING_ONLY_ONE_OF_ELEMENT_OR_TYPE_REQUIRED(Object arg0) { return localizer.localize(localizablePARSING_ONLY_ONE_OF_ELEMENT_OR_TYPE_REQUIRED(arg0)); } public static Localizable localizableVALIDATION_UNSUPPORTED_USE_ENCODED(Object arg0, Object arg1) { return messageFactory.getMessage("validation.unsupportedUse.encoded", arg0, arg1); } /** * "Use of SOAP Encoding is not supported. 
* SOAP extension element on line {0} in {1} has use="encoded" " * */ public static String VALIDATION_UNSUPPORTED_USE_ENCODED(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_UNSUPPORTED_USE_ENCODED(arg0, arg1)); } public static Localizable localizablePARSING_INCORRECT_ROOT_ELEMENT(Object arg0, Object arg1, Object arg2, Object arg3) { return messageFactory.getMessage("parsing.incorrectRootElement", arg0, arg1, arg2, arg3); } /** * expected root element "{2}" (in namespace "{3}"), found element "{0}" (in namespace "{1}") * */ public static String PARSING_INCORRECT_ROOT_ELEMENT(Object arg0, Object arg1, Object arg2, Object arg3) { return localizer.localize(localizablePARSING_INCORRECT_ROOT_ELEMENT(arg0, arg1, arg2, arg3)); } public static Localizable localizableTRY_WITH_MEX(Object arg0) { return messageFactory.getMessage("try.with.mex", arg0); } /** * {0} * * retrying with MEX... * */ public static String TRY_WITH_MEX(Object arg0) { return localizer.localize(localizableTRY_WITH_MEX(arg0)); } public static Localizable localizableVALIDATION_MISSING_REQUIRED_ATTRIBUTE(Object arg0, Object arg1) { return messageFactory.getMessage("validation.missingRequiredAttribute", arg0, arg1); } /** * missing required attribute "{0}" of element "{1}" * */ public static String VALIDATION_MISSING_REQUIRED_ATTRIBUTE(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_MISSING_REQUIRED_ATTRIBUTE(arg0, arg1)); } public static Localizable localizablePARSING_IO_EXCEPTION(Object arg0) { return messageFactory.getMessage("parsing.ioException", arg0); } /** * parsing failed: {0} * */ public static String PARSING_IO_EXCEPTION(Object arg0) { return localizer.localize(localizablePARSING_IO_EXCEPTION(arg0)); } public static Localizable localizableINTERNALIZER_X_PATH_EVAULATES_TO_TOO_MANY_TARGETS(Object arg0, Object arg1) { return messageFactory.getMessage("internalizer.XPathEvaulatesToTooManyTargets", arg0, arg1); } /** * XPath evaluation of "{0}" 
results in too many ({1}) target nodes * */ public static String INTERNALIZER_X_PATH_EVAULATES_TO_TOO_MANY_TARGETS(Object arg0, Object arg1) { return localizer.localize(localizableINTERNALIZER_X_PATH_EVAULATES_TO_TOO_MANY_TARGETS(arg0, arg1)); } public static Localizable localizablePARSER_NOT_A_BINDING_FILE(Object arg0, Object arg1) { return messageFactory.getMessage("Parser.NotABindingFile", arg0, arg1); } /** * not an external binding file. The root element must be '{'http://java.sun.com/xml/ns/jaxws'}'bindings but it is '{'{0}'}'{1} * */ public static String PARSER_NOT_A_BINDING_FILE(Object arg0, Object arg1) { return localizer.localize(localizablePARSER_NOT_A_BINDING_FILE(arg0, arg1)); } public static Localizable localizablePARSING_UNKNOWN_NAMESPACE_PREFIX(Object arg0) { return messageFactory.getMessage("parsing.unknownNamespacePrefix", arg0); } /** * undeclared namespace prefix: "{0}" * */ public static String PARSING_UNKNOWN_NAMESPACE_PREFIX(Object arg0) { return localizer.localize(localizablePARSING_UNKNOWN_NAMESPACE_PREFIX(arg0)); } public static Localizable localizablePARSING_FACTORY_CONFIG_EXCEPTION(Object arg0) { return messageFactory.getMessage("parsing.factoryConfigException", arg0); } /** * invalid WSDL file! 
parsing failed: {0} * */ public static String PARSING_FACTORY_CONFIG_EXCEPTION(Object arg0) { return localizer.localize(localizablePARSING_FACTORY_CONFIG_EXCEPTION(arg0)); } public static Localizable localizableVALIDATION_MISSING_REQUIRED_PROPERTY(Object arg0, Object arg1) { return messageFactory.getMessage("validation.missingRequiredProperty", arg0, arg1); } /** * missing required property "{0}" of element "{1}" * */ public static String VALIDATION_MISSING_REQUIRED_PROPERTY(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_MISSING_REQUIRED_PROPERTY(arg0, arg1)); } public static Localizable localizablePARSING_INVALID_OPERATION_STYLE(Object arg0) { return messageFactory.getMessage("parsing.invalidOperationStyle", arg0); } /** * operation "{0}" has an invalid style * */ public static String PARSING_INVALID_OPERATION_STYLE(Object arg0) { return localizer.localize(localizablePARSING_INVALID_OPERATION_STYLE(arg0)); } public static Localizable localizableINTERNALIZER_X_PATH_EVALUATION_ERROR(Object arg0) { return messageFactory.getMessage("internalizer.XPathEvaluationError", arg0); } /** * XPath error: {0} * */ public static String INTERNALIZER_X_PATH_EVALUATION_ERROR(Object arg0) { return localizer.localize(localizableINTERNALIZER_X_PATH_EVALUATION_ERROR(arg0)); } public static Localizable localizableVALIDATION_INVALID_SUB_ENTITY(Object arg0, Object arg1) { return messageFactory.getMessage("validation.invalidSubEntity", arg0, arg1); } /** * invalid sub-element "{0}" of element "{1}" * */ public static String VALIDATION_INVALID_SUB_ENTITY(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_INVALID_SUB_ENTITY(arg0, arg1)); } public static Localizable localizableVALIDATION_SHOULD_NOT_HAPPEN(Object arg0) { return messageFactory.getMessage("validation.shouldNotHappen", arg0); } /** * internal error ("{0}") * */ public static String VALIDATION_SHOULD_NOT_HAPPEN(Object arg0) { return 
localizer.localize(localizableVALIDATION_SHOULD_NOT_HAPPEN(arg0)); } public static Localizable localizableABSTRACT_REFERENCE_FINDER_IMPL_UNABLE_TO_PARSE(Object arg0, Object arg1) { return messageFactory.getMessage("AbstractReferenceFinderImpl.UnableToParse", arg0, arg1); } /** * Unable to parse "{0}" : {1} * */ public static String ABSTRACT_REFERENCE_FINDER_IMPL_UNABLE_TO_PARSE(Object arg0, Object arg1) { return localizer.localize(localizableABSTRACT_REFERENCE_FINDER_IMPL_UNABLE_TO_PARSE(arg0, arg1)); } public static Localizable localizableWARNING_FAULT_EMPTY_ACTION(Object arg0, Object arg1, Object arg2) { return messageFactory.getMessage("warning.faultEmptyAction", arg0, arg1, arg2); } /** * ignoring empty Action in "{0}" {1} element of "{2}" operation, using default instead * */ public static String WARNING_FAULT_EMPTY_ACTION(Object arg0, Object arg1, Object arg2) { return localizer.localize(localizableWARNING_FAULT_EMPTY_ACTION(arg0, arg1, arg2)); } public static Localizable localizablePARSING_INVALID_EXTENSION_ELEMENT(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.invalidExtensionElement", arg0, arg1); } /** * invalid extension element: "{0}" (in namespace "{1}") * */ public static String PARSING_INVALID_EXTENSION_ELEMENT(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_INVALID_EXTENSION_ELEMENT(arg0, arg1)); } public static Localizable localizableINTERNALIZER_X_PATH_EVALUATES_TO_NON_ELEMENT(Object arg0) { return messageFactory.getMessage("internalizer.XPathEvaluatesToNonElement", arg0); } /** * XPath evaluation of "{0}" needs to result in an element. 
* */ public static String INTERNALIZER_X_PATH_EVALUATES_TO_NON_ELEMENT(Object arg0) { return localizer.localize(localizableINTERNALIZER_X_PATH_EVALUATES_TO_NON_ELEMENT(arg0)); } public static Localizable localizableINTERNALIZER_X_PATH_EVALUATES_TO_NO_TARGET(Object arg0) { return messageFactory.getMessage("internalizer.XPathEvaluatesToNoTarget", arg0); } /** * XPath evaluation of "{0}" results in an empty target node * */ public static String INTERNALIZER_X_PATH_EVALUATES_TO_NO_TARGET(Object arg0) { return localizer.localize(localizableINTERNALIZER_X_PATH_EVALUATES_TO_NO_TARGET(arg0)); } public static Localizable localizablePARSING_SAX_EXCEPTION(Object arg0) { return messageFactory.getMessage("parsing.saxException", arg0); } /** * invalid WSDL file! parsing failed: {0} * */ public static String PARSING_SAX_EXCEPTION(Object arg0) { return localizer.localize(localizablePARSING_SAX_EXCEPTION(arg0)); } public static Localizable localizableINVALID_CUSTOMIZATION_NAMESPACE(Object arg0) { return messageFactory.getMessage("invalid.customization.namespace", arg0); } /** * Ignoring customization: "{0}", because it has no namespace. It must belong to the customization namespace. * */ public static String INVALID_CUSTOMIZATION_NAMESPACE(Object arg0) { return localizer.localize(localizableINVALID_CUSTOMIZATION_NAMESPACE(arg0)); } public static Localizable localizableVALIDATION_INVALID_ATTRIBUTE(Object arg0, Object arg1) { return messageFactory.getMessage("validation.invalidAttribute", arg0, arg1); } /** * invalid attribute "{0}" of element "{1}" * */ public static String VALIDATION_INVALID_ATTRIBUTE(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_INVALID_ATTRIBUTE(arg0, arg1)); } public static Localizable localizablePARSING_PARSER_CONFIG_EXCEPTION(Object arg0) { return messageFactory.getMessage("parsing.parserConfigException", arg0); } /** * invalid WSDL file! 
parsing failed: {0} * */ public static String PARSING_PARSER_CONFIG_EXCEPTION(Object arg0) { return localizer.localize(localizablePARSING_PARSER_CONFIG_EXCEPTION(arg0)); } public static Localizable localizablePARSING_ONLY_ONE_TYPES_ALLOWED(Object arg0) { return messageFactory.getMessage("parsing.onlyOneTypesAllowed", arg0); } /** * only one "types" element allowed in "{0}" * */ public static String PARSING_ONLY_ONE_TYPES_ALLOWED(Object arg0) { return localizer.localize(localizablePARSING_ONLY_ONE_TYPES_ALLOWED(arg0)); } public static Localizable localizablePARSING_INVALID_URI(Object arg0) { return messageFactory.getMessage("parsing.invalidURI", arg0); } /** * invalid URI: {0} * */ public static String PARSING_INVALID_URI(Object arg0) { return localizer.localize(localizablePARSING_INVALID_URI(arg0)); } public static Localizable localizableVALIDATION_INCORRECT_TARGET_NAMESPACE(Object arg0, Object arg1) { return messageFactory.getMessage("validation.incorrectTargetNamespace", arg0, arg1); } /** * target namespace is incorrect (expected: {1}, found: {0}) * */ public static String VALIDATION_INCORRECT_TARGET_NAMESPACE(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_INCORRECT_TARGET_NAMESPACE(arg0, arg1)); } public static Localizable localizableENTITY_NOT_FOUND_BY_Q_NAME(Object arg0, Object arg1, Object arg2) { return messageFactory.getMessage("entity.notFoundByQName", arg0, arg1, arg2); } /** * {0} "{1}" not found in the wsdl: {2} * */ public static String ENTITY_NOT_FOUND_BY_Q_NAME(Object arg0, Object arg1, Object arg2) { return localizer.localize(localizableENTITY_NOT_FOUND_BY_Q_NAME(arg0, arg1, arg2)); } public static Localizable localizableINVALID_WSDL(Object arg0, Object arg1, Object arg2, Object arg3) { return messageFactory.getMessage("invalid.wsdl", arg0, arg1, arg2, arg3); } /** * Invalid WSDL {0}, expected {1} found {2} at (line {3}) * */ public static String INVALID_WSDL(Object arg0, Object arg1, Object arg2, Object arg3) { return 
localizer.localize(localizableINVALID_WSDL(arg0, arg1, arg2, arg3)); } public static Localizable localizablePARSING_UNKNOWN_IMPORTED_DOCUMENT_TYPE(Object arg0) { return messageFactory.getMessage("parsing.unknownImportedDocumentType", arg0); } /** * imported document is of unknown type: {0} * */ public static String PARSING_UNKNOWN_IMPORTED_DOCUMENT_TYPE(Object arg0) { return localizer.localize(localizablePARSING_UNKNOWN_IMPORTED_DOCUMENT_TYPE(arg0)); } public static Localizable localizablePARSING_IO_EXCEPTION_WITH_SYSTEM_ID(Object arg0) { return messageFactory.getMessage("parsing.ioExceptionWithSystemId", arg0); } /** * failed to parse document at "{0}" * */ public static String PARSING_IO_EXCEPTION_WITH_SYSTEM_ID(Object arg0) { return localizer.localize(localizablePARSING_IO_EXCEPTION_WITH_SYSTEM_ID(arg0)); } public static Localizable localizableVALIDATION_AMBIGUOUS_NAME(Object arg0) { return messageFactory.getMessage("validation.ambiguousName", arg0); } /** * ambiguous operation name: "{0}" * */ public static String VALIDATION_AMBIGUOUS_NAME(Object arg0) { return localizer.localize(localizableVALIDATION_AMBIGUOUS_NAME(arg0)); } public static Localizable localizablePARSING_WSDL_NOT_DEFAULT_NAMESPACE(Object arg0) { return messageFactory.getMessage("parsing.wsdlNotDefaultNamespace", arg0); } /** * default namespace must be "{0}" * */ public static String PARSING_WSDL_NOT_DEFAULT_NAMESPACE(Object arg0) { return localizer.localize(localizablePARSING_WSDL_NOT_DEFAULT_NAMESPACE(arg0)); } public static Localizable localizablePARSING_UNKNOWN_EXTENSIBILITY_ELEMENT_OR_ATTRIBUTE(Object arg0, Object arg1) { return messageFactory.getMessage("parsing.unknownExtensibilityElementOrAttribute", arg0, arg1); } /** * unknown extensibility element or attribute "{0}" (in namespace "{1}") * */ public static String PARSING_UNKNOWN_EXTENSIBILITY_ELEMENT_OR_ATTRIBUTE(Object arg0, Object arg1) { return localizer.localize(localizablePARSING_UNKNOWN_EXTENSIBILITY_ELEMENT_OR_ATTRIBUTE(arg0, 
arg1)); } public static Localizable localizableVALIDATION_DUPLICATED_ELEMENT(Object arg0) { return messageFactory.getMessage("validation.duplicatedElement", arg0); } /** * duplicated element: "{0}" * */ public static String VALIDATION_DUPLICATED_ELEMENT(Object arg0) { return localizer.localize(localizableVALIDATION_DUPLICATED_ELEMENT(arg0)); } public static Localizable localizableINTERNALIZER_TARGET_NOT_AN_ELEMENT() { return messageFactory.getMessage("internalizer.targetNotAnElement"); } /** * Target node is not an element * */ public static String INTERNALIZER_TARGET_NOT_AN_ELEMENT() { return localizer.localize(localizableINTERNALIZER_TARGET_NOT_AN_ELEMENT()); } public static Localizable localizableWARNING_INPUT_OUTPUT_EMPTY_ACTION(Object arg0, Object arg1) { return messageFactory.getMessage("warning.inputOutputEmptyAction", arg0, arg1); } /** * ignoring empty Action in {0} element of "{1}" operation, using default instead * */ public static String WARNING_INPUT_OUTPUT_EMPTY_ACTION(Object arg0, Object arg1) { return localizer.localize(localizableWARNING_INPUT_OUTPUT_EMPTY_ACTION(arg0, arg1)); } public static Localizable localizablePARSING_INVALID_TAG_NS(Object arg0, Object arg1, Object arg2, Object arg3, Object arg4) { return messageFactory.getMessage("parsing.invalidTagNS", arg0, arg1, arg2, arg3, arg4); } /** * Invalid WSDL at {4}: expected element "{2}" (in namespace "{3}"), found element "{0}" (in namespace "{1}") * */ public static String PARSING_INVALID_TAG_NS(Object arg0, Object arg1, Object arg2, Object arg3, Object arg4) { return localizer.localize(localizablePARSING_INVALID_TAG_NS(arg0, arg1, arg2, arg3, arg4)); } public static Localizable localizableINVALID_WSDL_WITH_DOOC(Object arg0, Object arg1) { return messageFactory.getMessage("invalid.wsdl.with.dooc", arg0, arg1); } /** * "Not a WSDL document: {0}, it gives "{1}", retrying with MEX..." 
* */ public static String INVALID_WSDL_WITH_DOOC(Object arg0, Object arg1) { return localizer.localize(localizableINVALID_WSDL_WITH_DOOC(arg0, arg1)); } public static Localizable localizablePARSING_NOT_AWSDL(Object arg0) { return messageFactory.getMessage("Parsing.NotAWSDL", arg0); } /** * Failed to get WSDL components, probably {0} is not a valid WSDL file. * */ public static String PARSING_NOT_AWSDL(Object arg0) { return localizer.localize(localizablePARSING_NOT_AWSDL(arg0)); } public static Localizable localizableENTITY_DUPLICATE(Object arg0) { return messageFactory.getMessage("entity.duplicate", arg0); } /** * duplicate entity: "{0}" * */ public static String ENTITY_DUPLICATE(Object arg0) { return localizer.localize(localizableENTITY_DUPLICATE(arg0)); } public static Localizable localizableWARNING_WSI_R_2004() { return messageFactory.getMessage("warning.wsi.r2004"); } /** * Not a WSI-BP compliant WSDL (R2001, R2004). xsd:import must not import XML Schema definitions embedded inline within the WSDL document. * */ public static String WARNING_WSI_R_2004() { return localizer.localize(localizableWARNING_WSI_R_2004()); } public static Localizable localizableWARNING_WSI_R_2003() { return messageFactory.getMessage("warning.wsi.r2003"); } /** * Not a WSI-BP compliant WSDL (R2003). xsd:import must only be used inside xsd:schema elements. * */ public static String WARNING_WSI_R_2003() { return localizer.localize(localizableWARNING_WSI_R_2003()); } public static Localizable localizableWARNING_WSI_R_2002(Object arg0, Object arg1) { return messageFactory.getMessage("warning.wsi.r2002", arg0, arg1); } /** * Not a WSI-BP compliant WSDL (R2002). wsdl:import must not be used to import XML Schema embedded in the WSDL document. 
Expected WSDL namespace: {0}, found: {1} * */ public static String WARNING_WSI_R_2002(Object arg0, Object arg1) { return localizer.localize(localizableWARNING_WSI_R_2002(arg0, arg1)); } public static Localizable localizablePARSING_ELEMENT_OR_TYPE_REQUIRED(Object arg0) { return messageFactory.getMessage("parsing.elementOrTypeRequired", arg0); } /** * warning: part {0} is ignored, either the "element" or the "type" attribute is required in part "{0}" * */ public static String PARSING_ELEMENT_OR_TYPE_REQUIRED(Object arg0) { return localizer.localize(localizablePARSING_ELEMENT_OR_TYPE_REQUIRED(arg0)); } public static Localizable localizableWARNING_WSI_R_2001() { return messageFactory.getMessage("warning.wsi.r2001"); } /** * Not a WSI-BP compliant WSDL (R2001, R2002). wsdl:import must import only WSDL documents. It's trying to import: "{0}" * */ public static String WARNING_WSI_R_2001() { return localizer.localize(localizableWARNING_WSI_R_2001()); } public static Localizable localizableFILE_NOT_FOUND(Object arg0) { return messageFactory.getMessage("file.not.found", arg0); } /** * {0} is unreachable * */ public static String FILE_NOT_FOUND(Object arg0) { return localizer.localize(localizableFILE_NOT_FOUND(arg0)); } public static Localizable localizableVALIDATION_INVALID_SIMPLE_TYPE_IN_ELEMENT(Object arg0, Object arg1) { return messageFactory.getMessage("validation.invalidSimpleTypeInElement", arg0, arg1); } /** * invalid element: "{1}", has named simpleType: "{0}" * */ public static String VALIDATION_INVALID_SIMPLE_TYPE_IN_ELEMENT(Object arg0, Object arg1) { return localizer.localize(localizableVALIDATION_INVALID_SIMPLE_TYPE_IN_ELEMENT(arg0, arg1)); } public static Localizable localizablePARSING_ONLY_ONE_DOCUMENTATION_ALLOWED(Object arg0) { return messageFactory.getMessage("parsing.onlyOneDocumentationAllowed", arg0); } /** * only one "documentation" element allowed in "{0}" * */ public static String PARSING_ONLY_ONE_DOCUMENTATION_ALLOWED(Object arg0) { return 
localizer.localize(localizablePARSING_ONLY_ONE_DOCUMENTATION_ALLOWED(arg0)); } public static Localizable localizableINTERNALIZER_VERSION_NOT_PRESENT() { return messageFactory.getMessage("Internalizer.VersionNotPresent"); } /** * JAXWS version attribute must be present * */ public static String INTERNALIZER_VERSION_NOT_PRESENT() { return localizer.localize(localizableINTERNALIZER_VERSION_NOT_PRESENT()); } public static Localizable localizableFAILED_NOSERVICE(Object arg0) { return messageFactory.getMessage("failed.noservice", arg0); } /** * Could not find wsdl:service in the provided WSDL(s): * * {0} At least one WSDL with at least one service definition needs to be provided. * */ public static String FAILED_NOSERVICE(Object arg0) { return localizer.localize(localizableFAILED_NOSERVICE(arg0)); } public static Localizable localizablePARSING_TOO_MANY_ELEMENTS(Object arg0, Object arg1, Object arg2) { return messageFactory.getMessage("parsing.tooManyElements", arg0, arg1, arg2); } /** * too many "{0}" elements under "{1}" element "{2}" * */ public static String PARSING_TOO_MANY_ELEMENTS(Object arg0, Object arg1, Object arg2) { return localizer.localize(localizablePARSING_TOO_MANY_ELEMENTS(arg0, arg1, arg2)); } public static Localizable localizableINTERNALIZER_INCORRECT_SCHEMA_REFERENCE(Object arg0, Object arg1) { return messageFactory.getMessage("Internalizer.IncorrectSchemaReference", arg0, arg1); } /** * "{0}" is not a part of this compilation. Is this a mistake for "{1}"? * */ public static String INTERNALIZER_INCORRECT_SCHEMA_REFERENCE(Object arg0, Object arg1) { return localizer.localize(localizableINTERNALIZER_INCORRECT_SCHEMA_REFERENCE(arg0, arg1)); } }
googleapis/google-cloud-java
35,463
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/SearchConnectionsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * Request message for SearchConnections. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.SearchConnectionsRequest} */ public final class SearchConnectionsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.SearchConnectionsRequest) SearchConnectionsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SearchConnectionsRequest.newBuilder() to construct. 
private SearchConnectionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchConnectionsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchConnectionsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_SearchConnectionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_SearchConnectionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.SearchConnectionsRequest.class, com.google.privacy.dlp.v2.SearchConnectionsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Number of results per page, max 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.SearchConnectionsRequest)) { return super.equals(obj); } com.google.privacy.dlp.v2.SearchConnectionsRequest other = (com.google.privacy.dlp.v2.SearchConnectionsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.SearchConnectionsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for SearchConnections. 
* </pre> * * Protobuf type {@code google.privacy.dlp.v2.SearchConnectionsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.SearchConnectionsRequest) com.google.privacy.dlp.v2.SearchConnectionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_SearchConnectionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_SearchConnectionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.SearchConnectionsRequest.class, com.google.privacy.dlp.v2.SearchConnectionsRequest.Builder.class); } // Construct using com.google.privacy.dlp.v2.SearchConnectionsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_SearchConnectionsRequest_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.SearchConnectionsRequest getDefaultInstanceForType() { return com.google.privacy.dlp.v2.SearchConnectionsRequest.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.SearchConnectionsRequest build() { com.google.privacy.dlp.v2.SearchConnectionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override 
public com.google.privacy.dlp.v2.SearchConnectionsRequest buildPartial() { com.google.privacy.dlp.v2.SearchConnectionsRequest result = new com.google.privacy.dlp.v2.SearchConnectionsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.SearchConnectionsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.SearchConnectionsRequest) { return mergeFrom((com.google.privacy.dlp.v2.SearchConnectionsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.SearchConnectionsRequest other) { if (other == 
com.google.privacy.dlp.v2.SearchConnectionsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Resource name of the organization or project with a wildcard * location, for example, `organizations/433245324/locations/-` or * `projects/project-id/locations/-`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Number of results per page, max 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Number of results per page, max 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Number of results per page, max 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. 
Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. Page token from a previous page to return the next set of * results. If set, all other request fields must match the original request. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. 
* @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Supported field/value: - `state` - MISSING|AVAILABLE|ERROR * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.SearchConnectionsRequest) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.SearchConnectionsRequest) private static final com.google.privacy.dlp.v2.SearchConnectionsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.SearchConnectionsRequest(); } public static com.google.privacy.dlp.v2.SearchConnectionsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchConnectionsRequest> PARSER = new 
com.google.protobuf.AbstractParser<SearchConnectionsRequest>() { @java.lang.Override public SearchConnectionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchConnectionsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchConnectionsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.SearchConnectionsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,437
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListProductsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Request message for ListProducts. * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListProductsRequest} */ public final class ListProductsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListProductsRequest) ListProductsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListProductsRequest.newBuilder() to construct. 
private ListProductsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListProductsRequest() { account_ = ""; pageToken_ = ""; languageCode_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListProductsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListProductsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListProductsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListProductsRequest.class, com.google.cloud.channel.v1.ListProductsRequest.Builder.class); } public static final int ACCOUNT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object account_ = ""; /** * * * <pre> * Required. The resource name of the reseller account. * Format: accounts/{account_id}. * </pre> * * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The account. */ @java.lang.Override public java.lang.String getAccount() { java.lang.Object ref = account_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); account_ = s; return s; } } /** * * * <pre> * Required. The resource name of the reseller account. * Format: accounts/{account_id}. * </pre> * * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for account. 
*/ @java.lang.Override public com.google.protobuf.ByteString getAccountBytes() { java.lang.Object ref = account_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); account_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Requested page size. Server might return fewer results than * requested. If unspecified, returns at most 100 Products. The maximum value * is 1000; the server will coerce values above 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token for a page of results other than the first page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token for a page of results other than the first page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LANGUAGE_CODE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object languageCode_ = ""; /** * * * <pre> * Optional. The BCP-47 language code. For example, "en-US". The * response will localize in the corresponding language code, if specified. * The default value is "en-US". * </pre> * * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The languageCode. */ @java.lang.Override public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * * * <pre> * Optional. The BCP-47 language code. For example, "en-US". The * response will localize in the corresponding language code, if specified. * The default value is "en-US". * </pre> * * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for languageCode. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(account_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, account_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, languageCode_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(account_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, account_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, languageCode_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.ListProductsRequest)) { return super.equals(obj); } com.google.cloud.channel.v1.ListProductsRequest other = (com.google.cloud.channel.v1.ListProductsRequest) obj; if (!getAccount().equals(other.getAccount())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getLanguageCode().equals(other.getLanguageCode())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ACCOUNT_FIELD_NUMBER; hash = (53 * hash) + getAccount().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListProductsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListProductsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListProductsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.ListProductsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ListProducts. 
   * </pre>
   *
   * Protobuf type {@code google.cloud.channel.v1.ListProductsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListProductsRequest)
      com.google.cloud.channel.v1.ListProductsRequestOrBuilder {

    // NOTE(review): protoc-generated builder; do not hand-edit, regenerate from the proto.
    // Presence bits in bitField0_: account=0x1, pageSize=0x2, pageToken=0x4, languageCode=0x8.

    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.channel.v1.ServiceProto
          .internal_static_google_cloud_channel_v1_ListProductsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.channel.v1.ServiceProto
          .internal_static_google_cloud_channel_v1_ListProductsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.channel.v1.ListProductsRequest.class,
              com.google.cloud.channel.v1.ListProductsRequest.Builder.class);
    }

    // Construct using com.google.cloud.channel.v1.ListProductsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      account_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      languageCode_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.channel.v1.ServiceProto
          .internal_static_google_cloud_channel_v1_ListProductsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.channel.v1.ListProductsRequest getDefaultInstanceForType() {
      return com.google.cloud.channel.v1.ListProductsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.channel.v1.ListProductsRequest build() {
      com.google.cloud.channel.v1.ListProductsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.channel.v1.ListProductsRequest buildPartial() {
      com.google.cloud.channel.v1.ListProductsRequest result =
          new com.google.cloud.channel.v1.ListProductsRequest(this);
      // Copy only fields whose presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.channel.v1.ListProductsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.account_ = account_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.languageCode_ = languageCode_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.channel.v1.ListProductsRequest) {
        return mergeFrom((com.google.cloud.channel.v1.ListProductsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges non-default fields of `other` into this builder; string fields copy the raw
    // reference (String or ByteString) and set the presence bit directly.
    public Builder mergeFrom(com.google.cloud.channel.v1.ListProductsRequest other) {
      if (other == com.google.cloud.channel.v1.ListProductsRequest.getDefaultInstance())
        return this;
      if (!other.getAccount().isEmpty()) {
        account_ = other.account_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getLanguageCode().isEmpty()) {
        languageCode_ = other.languageCode_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Reads fields from the wire until EOF or an end-group tag; tags 10/16/26/34 are
    // fields 1..4, anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                account_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                languageCode_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object account_ = "";
    /**
     * <pre>
     * Required. The resource name of the reseller account.
     * Format: accounts/{account_id}.
     * </pre>
     *
     * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The account.
     */
    public java.lang.String getAccount() {
      java.lang.Object ref = account_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip UTF-8 decoding.
        account_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for account.
     */
    public com.google.protobuf.ByteString getAccountBytes() {
      java.lang.Object ref = account_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        account_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The account to set.
     * @return This builder for chaining.
     */
    public Builder setAccount(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      account_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAccount() {
      account_ = getDefaultInstance().getAccount();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * <code>string account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for account to set.
     * @return This builder for chaining.
     */
    public Builder setAccountBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      account_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     * <pre>
     * Optional. Requested page size. Server might return fewer results than
     * requested. If unspecified, returns at most 100 Products. The maximum value
     * is 1000; the server will coerce values above 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     * <pre>
     * Optional. A token for a page of results other than the first page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object languageCode_ = "";
    /**
     * <pre>
     * Optional. The BCP-47 language code. For example, "en-US". The
     * response will localize in the corresponding language code, if specified.
     * The default value is "en-US".
     * </pre>
     *
     * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The languageCode.
     */
    public java.lang.String getLanguageCode() {
      java.lang.Object ref = languageCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        languageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for languageCode.
     */
    public com.google.protobuf.ByteString getLanguageCodeBytes() {
      java.lang.Object ref = languageCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        languageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCode(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      languageCode_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLanguageCode() {
      languageCode_ = getDefaultInstance().getLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     * <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      languageCode_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListProductsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListProductsRequest)
  private static final com.google.cloud.channel.v1.ListProductsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListProductsRequest();
  }

  public static com.google.cloud.channel.v1.ListProductsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser; parse failures carry the partially built message as "unfinished".
  private static final com.google.protobuf.Parser<ListProductsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListProductsRequest>() {
        @java.lang.Override
        public ListProductsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListProductsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListProductsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.channel.v1.ListProductsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/flink
35,743
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.legacy.api; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.CompositeType; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.Schema; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.catalog.Column; import org.apache.flink.table.catalog.ResolvedSchema; import org.apache.flink.table.expressions.DefaultSqlFactory; import org.apache.flink.table.expressions.SqlFactory; import org.apache.flink.table.legacy.api.TableColumn.ComputedColumn; import org.apache.flink.table.legacy.api.TableColumn.MetadataColumn; import org.apache.flink.table.legacy.api.TableColumn.PhysicalColumn; import org.apache.flink.table.legacy.api.constraints.UniqueConstraint; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.LegacyTypeInformationType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.LogicalTypeRoot; import org.apache.flink.table.types.logical.utils.LogicalTypeChecks; import org.apache.flink.table.types.utils.TypeConversions; import 
org.apache.flink.types.Row;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.StringUtils;

import javax.annotation.Nullable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.Field;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.canBeTimeAttributeType;
import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType;
import static org.apache.flink.table.types.utils.TypeConversions.fromDataTypeToLegacyInfo;
import static org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType;

/**
 * A table schema that represents a table's structure with field names and data types.
 *
 * @deprecated This class has been deprecated as part of FLIP-164. It has been replaced by two more
 *     dedicated classes {@link Schema} and {@link ResolvedSchema}. Use {@link Schema} for
 *     declaration in APIs. {@link ResolvedSchema} is offered by the framework after resolution and
 *     validation.
 */
@Deprecated
@Internal
public class TableSchema {

    // Name given to the single column when an atomic (non-composite) type is wrapped.
    private static final String ATOMIC_TYPE_FIELD_NAME = "f0";

    // Physical, computed, and metadata columns in declaration order.
    private final List<TableColumn> columns;

    // Watermark declarations; see getWatermarkSpecs() — currently at most one entry.
    private final List<WatermarkSpec> watermarkSpecs;

    // Primary key constraint, or null when the schema declares none.
    private final @Nullable UniqueConstraint primaryKey;

    // NOTE(review): only null-checks here; structural validation (duplicate names, rowtime
    // types, primary key columns) is expected to have happened in the constructing caller.
    private TableSchema(
            List<TableColumn> columns,
            List<WatermarkSpec> watermarkSpecs,
            @Nullable UniqueConstraint primaryKey) {
        this.columns = Preconditions.checkNotNull(columns);
        this.watermarkSpecs = Preconditions.checkNotNull(watermarkSpecs);
        this.primaryKey = primaryKey;
    }

    /**
     * @deprecated Use the {@link Builder} instead.
     */
    @Deprecated
    public TableSchema(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
        DataType[] fieldDataTypes = fromLegacyInfoToDataType(fieldTypes);
        validateNameTypeNumberEqual(fieldNames, fieldDataTypes);
        List<TableColumn> columns = new ArrayList<>();
        for (int i = 0; i < fieldNames.length; i++) {
            columns.add(TableColumn.physical(fieldNames[i], fieldDataTypes[i]));
        }
        // The legacy constructor cannot declare watermarks or a primary key.
        validateColumnsAndWatermarkSpecs(columns, Collections.emptyList());
        this.columns = columns;
        this.watermarkSpecs = Collections.emptyList();
        this.primaryKey = null;
    }

    /** Returns a deep copy of the table schema. */
    public TableSchema copy() {
        return new TableSchema(
                new ArrayList<>(columns), new ArrayList<>(watermarkSpecs), primaryKey);
    }

    /** Returns all field data types as an array. */
    public DataType[] getFieldDataTypes() {
        return columns.stream().map(TableColumn::getType).toArray(DataType[]::new);
    }

    /**
     * @deprecated This method will be removed in future versions as it uses the old type system. It
     *     is recommended to use {@link #getFieldDataTypes()} instead which uses the new type system
     *     based on {@link DataTypes}. Please make sure to use either the old or the new type system
     *     consistently to avoid unintended behavior. See the website documentation for more
     *     information.
     */
    @Deprecated
    public TypeInformation<?>[] getFieldTypes() {
        return fromDataTypeToLegacyInfo(getFieldDataTypes());
    }

    /**
     * Returns the specified data type for the given field index.
     *
     * @param fieldIndex the index of the field
     */
    public Optional<DataType> getFieldDataType(int fieldIndex) {
        // Out-of-range indices yield an empty Optional instead of throwing.
        if (fieldIndex < 0 || fieldIndex >= columns.size()) {
            return Optional.empty();
        }
        return Optional.of(columns.get(fieldIndex).getType());
    }

    /**
     * @deprecated This method will be removed in future versions as it uses the old type system. It
     *     is recommended to use {@link #getFieldDataType(int)} instead which uses the new type
     *     system based on {@link DataTypes}. Please make sure to use either the old or the new type
     *     system consistently to avoid unintended behavior. See the website documentation for more
     *     information.
     */
    @Deprecated
    public Optional<TypeInformation<?>> getFieldType(int fieldIndex) {
        return getFieldDataType(fieldIndex).map(TypeConversions::fromDataTypeToLegacyInfo);
    }

    /**
     * Returns the specified data type for the given field name.
     *
     * @param fieldName the name of the field
     */
    public Optional<DataType> getFieldDataType(String fieldName) {
        return this.columns.stream()
                .filter(column -> column.getName().equals(fieldName))
                .findFirst()
                .map(TableColumn::getType);
    }

    /**
     * @deprecated This method will be removed in future versions as it uses the old type system. It
     *     is recommended to use {@link #getFieldDataType(String)} instead which uses the new type
     *     system based on {@link DataTypes}. Please make sure to use either the old or the new type
     *     system consistently to avoid unintended behavior. See the website documentation for more
     *     information.
     */
    @Deprecated
    public Optional<TypeInformation<?>> getFieldType(String fieldName) {
        return getFieldDataType(fieldName).map(TypeConversions::fromDataTypeToLegacyInfo);
    }

    /** Returns the number of fields. */
    public int getFieldCount() {
        return columns.size();
    }

    /** Returns all field names as an array. */
    public String[] getFieldNames() {
        return this.columns.stream().map(TableColumn::getName).toArray(String[]::new);
    }

    /**
     * Returns the specified name for the given field index.
     *
     * @param fieldIndex the index of the field
     */
    public Optional<String> getFieldName(int fieldIndex) {
        if (fieldIndex < 0 || fieldIndex >= columns.size()) {
            return Optional.empty();
        }
        return Optional.of(this.columns.get(fieldIndex).getName());
    }

    /**
     * Returns the {@link TableColumn} instance for the given field index.
     *
     * @param fieldIndex the index of the field
     */
    public Optional<TableColumn> getTableColumn(int fieldIndex) {
        if (fieldIndex < 0 || fieldIndex >= columns.size()) {
            return Optional.empty();
        }
        return Optional.of(this.columns.get(fieldIndex));
    }

    /**
     * Returns the {@link TableColumn} instance for the given field name.
     *
     * @param fieldName the name of the field
     */
    public Optional<TableColumn> getTableColumn(String fieldName) {
        return this.columns.stream()
                .filter(column -> column.getName().equals(fieldName))
                .findFirst();
    }

    /** Returns all the {@link TableColumn}s for this table schema. */
    public List<TableColumn> getTableColumns() {
        // Defensive copy: callers cannot mutate the schema's internal column list.
        return new ArrayList<>(this.columns);
    }

    /**
     * Converts all columns of this schema into a (possibly nested) row data type.
     *
     * <p>This method returns the <b>source-to-query schema</b>.
     *
     * <p>Note: The returned row data type contains physical, computed, and metadata columns. Be
     * careful when using this method in a table source or table sink. In many cases, {@link
     * #toPhysicalRowDataType()} might be more appropriate.
     *
     * @see DataTypes#ROW(Field...)
     * @see #toPhysicalRowDataType()
     * @see #toPersistedRowDataType()
     */
    public DataType toRowDataType() {
        final Field[] fields =
                columns.stream()
                        .map(column -> FIELD(column.getName(), column.getType()))
                        .toArray(Field[]::new);
        // The row should be never null.
        return ROW(fields).notNull();
    }

    /**
     * Converts all physical columns of this schema into a (possibly nested) row data type.
     *
     * <p>Note: The returned row data type contains only physical columns. It does not include
     * computed or metadata columns.
     *
     * @see DataTypes#ROW(Field...)
     * @see #toRowDataType()
     * @see #toPersistedRowDataType()
     */
    public DataType toPhysicalRowDataType() {
        final Field[] fields =
                columns.stream()
                        .filter(TableColumn::isPhysical)
                        .map(column -> FIELD(column.getName(), column.getType()))
                        .toArray(Field[]::new);
        // The row should be never null.
        return ROW(fields).notNull();
    }

    /**
     * Converts all persisted columns of this schema into a (possibly nested) row data type.
     *
     * <p>This method returns the <b>query-to-sink schema</b>.
     *
     * <p>Note: Computed columns and virtual columns are excluded in the returned row data type. The
     * data type contains the columns of {@link #toPhysicalRowDataType()} plus persisted metadata
     * columns.
     *
     * @see DataTypes#ROW(Field...)
     * @see #toRowDataType()
     * @see #toPhysicalRowDataType()
     */
    public DataType toPersistedRowDataType() {
        final Field[] fields =
                columns.stream()
                        .filter(TableColumn::isPersisted)
                        .map(column -> FIELD(column.getName(), column.getType()))
                        .toArray(Field[]::new);
        // The row should be never null.
        return ROW(fields).notNull();
    }

    /**
     * @deprecated Use {@link #toRowDataType()} instead.
     */
    @Deprecated
    @SuppressWarnings("unchecked")
    public TypeInformation<Row> toRowType() {
        return (TypeInformation<Row>) fromDataTypeToLegacyInfo(toRowDataType());
    }

    /**
     * Returns a list of the watermark specification which contains rowtime attribute and watermark
     * strategy expression.
     *
     * <p>NOTE: Currently, there is at most one {@link WatermarkSpec} in the list, because we don't
     * support multiple watermarks definition yet. But in the future, we may support multiple
     * watermarks.
     */
    public List<WatermarkSpec> getWatermarkSpecs() {
        // NOTE(review): returns the internal mutable list directly, unlike getTableColumns()
        // which copies — callers could mutate schema state; left unchanged for this legacy API.
        return watermarkSpecs;
    }

    public Optional<UniqueConstraint> getPrimaryKey() {
        return Optional.ofNullable(primaryKey);
    }

    /** Helps to migrate to the new {@link Schema} class. */
    public Schema toSchema() {
        return toSchema(Collections.emptyMap());
    }

    /** Helps to migrate to the new {@link Schema} class, retain comments when needed.
*/ public Schema toSchema(Map<String, String> comments) { final Schema.Builder builder = Schema.newBuilder(); columns.forEach( column -> { if (column instanceof PhysicalColumn) { final PhysicalColumn c = (PhysicalColumn) column; builder.column(c.getName(), c.getType()); } else if (column instanceof MetadataColumn) { final MetadataColumn c = (MetadataColumn) column; builder.columnByMetadata( c.getName(), c.getType(), c.getMetadataAlias().orElse(null), c.isVirtual()); } else if (column instanceof ComputedColumn) { final ComputedColumn c = (ComputedColumn) column; builder.columnByExpression(c.getName(), c.getExpression()); } else { throw new IllegalArgumentException("Unsupported column type: " + column); } String colName = column.getName(); if (comments.containsKey(colName)) { builder.withComment(comments.get(colName)); } }); watermarkSpecs.forEach( spec -> builder.watermark(spec.getRowtimeAttribute(), spec.getWatermarkExpr())); if (primaryKey != null) { builder.primaryKeyNamed(primaryKey.getName(), primaryKey.getColumns()); } return builder.build(); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("root\n"); for (TableColumn column : columns) { sb.append(" |-- "); sb.append(column.asSummaryString()); sb.append('\n'); } if (!watermarkSpecs.isEmpty()) { for (WatermarkSpec watermarkSpec : watermarkSpecs) { sb.append(" |-- "); sb.append(watermarkSpec.asSummaryString()); sb.append('\n'); } } if (primaryKey != null) { sb.append(" |-- ").append(primaryKey.asSummaryString()); sb.append('\n'); } return sb.toString(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TableSchema that = (TableSchema) o; return Objects.equals(columns, that.columns) && Objects.equals(watermarkSpecs, that.watermarkSpecs) && Objects.equals(primaryKey, that.primaryKey); } @Override public int hashCode() { return Objects.hash(columns, watermarkSpecs, primaryKey); } 
    /**
     * Creates a table schema from a {@link TypeInformation} instance. If the type information is a
     * {@link CompositeType}, the field names and types for the composite type are used to construct
     * the {@link TableSchema} instance. Otherwise, a table schema with a single field is created.
     * The field name is "f0" and the field type the provided type.
     *
     * @param typeInfo The {@link TypeInformation} from which the table schema is generated.
     * @return The table schema that was generated from the given {@link TypeInformation}.
     * @deprecated This method will be removed soon. Use {@link DataTypes} to declare types.
     */
    @Deprecated
    public static TableSchema fromTypeInfo(TypeInformation<?> typeInfo) {
        if (typeInfo instanceof CompositeType<?>) {
            final CompositeType<?> compositeType = (CompositeType<?>) typeInfo;
            // get field names and types from composite type
            final String[] fieldNames = compositeType.getFieldNames();
            final TypeInformation<?>[] fieldTypes = new TypeInformation[fieldNames.length];
            for (int i = 0; i < fieldTypes.length; i++) {
                fieldTypes[i] = compositeType.getTypeAt(i);
            }
            return new TableSchema(fieldNames, fieldTypes);
        } else {
            // create table schema with a single field named "f0" of the given type.
            return new TableSchema(
                    new String[] {ATOMIC_TYPE_FIELD_NAME}, new TypeInformation<?>[] {typeInfo});
        }
    }

    /** Helps to migrate to the new {@link ResolvedSchema} to old API methods. */
    public static TableSchema fromResolvedSchema(ResolvedSchema resolvedSchema) {
        return fromResolvedSchema(resolvedSchema, DefaultSqlFactory.INSTANCE);
    }

    /**
     * Same as {@link #fromResolvedSchema(ResolvedSchema)}, but serializes computed-column and
     * watermark expressions with the given {@link SqlFactory}.
     */
    public static TableSchema fromResolvedSchema(
            ResolvedSchema resolvedSchema, SqlFactory sqlFactory) {
        final TableSchema.Builder builder = TableSchema.builder();
        resolvedSchema.getColumns().stream()
                .map(
                        column -> {
                            if (column instanceof Column.PhysicalColumn) {
                                final Column.PhysicalColumn c = (Column.PhysicalColumn) column;
                                return TableColumn.physical(c.getName(), c.getDataType());
                            } else if (column instanceof Column.MetadataColumn) {
                                final Column.MetadataColumn c = (Column.MetadataColumn) column;
                                return TableColumn.metadata(
                                        c.getName(),
                                        c.getDataType(),
                                        c.getMetadataKey().orElse(null),
                                        c.isVirtual());
                            } else if (column instanceof Column.ComputedColumn) {
                                final Column.ComputedColumn c = (Column.ComputedColumn) column;
                                return TableColumn.computed(
                                        c.getName(),
                                        c.getDataType(),
                                        c.getExpression().asSerializableString(sqlFactory));
                            }
                            throw new IllegalArgumentException(
                                    "Unsupported column type: " + column);
                        })
                .forEach(builder::add);
        resolvedSchema
                .getWatermarkSpecs()
                .forEach(
                        spec ->
                                builder.watermark(
                                        spec.getRowtimeAttribute(),
                                        spec.getWatermarkExpression()
                                                .asSerializableString(sqlFactory),
                                        spec.getWatermarkExpression().getOutputDataType()));
        resolvedSchema
                .getPrimaryKey()
                .ifPresent(
                        pk ->
                                builder.primaryKey(
                                        pk.getName(), pk.getColumns().toArray(new String[0])));
        return builder.build();
    }

    public static Builder builder() {
        return new Builder();
    }

    // ~ Tools ------------------------------------------------------------------

    /**
     * Validate the field names {@code fieldNames} and field types {@code fieldTypes} have equal
     * number.
     *
     * @param fieldNames Field names
     * @param fieldTypes Field data types
     */
    private static void validateNameTypeNumberEqual(String[] fieldNames, DataType[] fieldTypes) {
        if (fieldNames.length != fieldTypes.length) {
            throw new ValidationException(
                    "Number of field names and field data types must be equal.\n"
                            + "Number of names is "
                            + fieldNames.length
                            + ", number of data types is "
                            + fieldTypes.length
                            + ".\n"
                            + "List of field names: "
                            + Arrays.toString(fieldNames)
                            + "\n"
                            + "List of field data types: "
                            + Arrays.toString(fieldTypes));
        }
    }

    /** Table column and watermark specification sanity check. */
    private static void validateColumnsAndWatermarkSpecs(
            List<TableColumn> columns, List<WatermarkSpec> watermarkSpecs) {
        // Validate and create name to type mapping.
        // Field name to data type mapping, we need this because the row time attribute
        // field can be nested.
        // This also check duplicate fields.
        final Map<String, LogicalType> fieldNameToType = new HashMap<>();
        for (TableColumn column : columns) {
            validateAndCreateNameToTypeMapping(
                    fieldNameToType, column.getName(), column.getType().getLogicalType(), "");
        }
        // Validate watermark and rowtime attribute.
        for (WatermarkSpec watermark : watermarkSpecs) {
            String rowtimeAttribute = watermark.getRowtimeAttribute();
            // The rowtime attribute may be a nested field, hence the lookup in the flattened map.
            LogicalType rowtimeType =
                    Optional.ofNullable(fieldNameToType.get(rowtimeAttribute))
                            .orElseThrow(
                                    () ->
                                            new ValidationException(
                                                    String.format(
                                                            "Rowtime attribute '%s' is not defined in schema.",
                                                            rowtimeAttribute)));
            if (!(rowtimeType.getTypeRoot() == LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE
                    || rowtimeType.getTypeRoot() == LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) {
                throw new ValidationException(
                        String.format(
                                "Rowtime attribute '%s' must be of type TIMESTAMP or TIMESTAMP_LTZ but is of type '%s'.",
                                rowtimeAttribute, rowtimeType));
            }
            LogicalType watermarkOutputType =
                    watermark.getWatermarkExprOutputType().getLogicalType();
            if (!canBeTimeAttributeType(watermarkOutputType)) {
                throw new ValidationException(
                        String.format(
                                "Watermark strategy %s must be of type TIMESTAMP or TIMESTAMP_LTZ but is of type '%s'.",
                                watermark.getWatermarkExpr(),
                                watermarkOutputType.asSummaryString()));
            }
        }
    }

    // Checks that every primary key column exists, is physical, and is NOT NULL.
    private static void validatePrimaryKey(List<TableColumn> columns, UniqueConstraint primaryKey) {
        // Index columns by name for O(1) lookup of each key column.
        Map<String, TableColumn> columnsByNameLookup =
                columns.stream()
                        .collect(Collectors.toMap(TableColumn::getName, Function.identity()));
        for (String columnName : primaryKey.getColumns()) {
            TableColumn column = columnsByNameLookup.get(columnName);
            if (column == null) {
                throw new ValidationException(
                        String.format(
                                "Could not create a PRIMARY KEY '%s'. Column '%s' does not exist.",
                                primaryKey.getName(), columnName));
            }
            if (!column.isPhysical()) {
                throw new ValidationException(
                        String.format(
                                "Could not create a PRIMARY KEY '%s'. Column '%s' is not a physical column.",
                                primaryKey.getName(), columnName));
            }
            if (column.getType().getLogicalType().isNullable()) {
                throw new ValidationException(
                        String.format(
                                "Could not create a PRIMARY KEY '%s'. Column '%s' is nullable.",
                                primaryKey.getName(), columnName));
            }
        }
    }

    /**
     * Creates a mapping from field name to data type, the field name can be a nested field. This is
     * mainly used for validating whether the rowtime attribute (might be nested) exists in the
     * schema. During creating, it also validates whether there is duplicate field names.
     *
     * <p>For example, a "f0" field of ROW type has two nested fields "q1" and "q2". Then the
     * mapping will be ["f0" -> ROW, "f0.q1" -> INT, "f0.q2" -> STRING].
     *
     * <pre>{@code
     * f0 ROW<q1 INT, q2 STRING>
     * }</pre>
     *
     * @param fieldNameToType Field name to type mapping that to update
     * @param fieldName Name of this field, e.g. "q1" or "q2" in the above example
     * @param fieldType Data type of this field
     * @param parentFieldName Field name of parent type, e.g. "f0" in the above example
     */
    private static void validateAndCreateNameToTypeMapping(
            Map<String, LogicalType> fieldNameToType,
            String fieldName,
            LogicalType fieldType,
            String parentFieldName) {
        String fullFieldName =
                parentFieldName.isEmpty() ? fieldName : parentFieldName + "." + fieldName;
        LogicalType oldType = fieldNameToType.put(fullFieldName, fieldType);
        if (oldType != null) {
            throw new ValidationException(
                    "Field names must be unique. Duplicate field: '" + fullFieldName + "'");
        }
        // Recurse into composite types (except legacy wrappers) to register nested fields.
        if (isCompositeType(fieldType) && !(fieldType instanceof LegacyTypeInformationType)) {
            final List<String> fieldNames = LogicalTypeChecks.getFieldNames(fieldType);
            final List<LogicalType> fieldTypes = fieldType.getChildren();
            IntStream.range(0, fieldNames.size())
                    .forEach(
                            i ->
                                    validateAndCreateNameToTypeMapping(
                                            fieldNameToType,
                                            fieldNames.get(i),
                                            fieldTypes.get(i),
                                            fullFieldName));
        }
    }

    // --------------------------------------------------------------------------------------------

    /** Builder for creating a {@link TableSchema}.
*/ @Internal public static class Builder { private List<TableColumn> columns; private final List<WatermarkSpec> watermarkSpecs; private UniqueConstraint primaryKey; public Builder() { columns = new ArrayList<>(); watermarkSpecs = new ArrayList<>(); } /** * Add a field with name and data type. * * <p>The call order of this method determines the order of fields in the schema. */ public Builder field(String name, DataType dataType) { Preconditions.checkNotNull(name); Preconditions.checkNotNull(dataType); columns.add(TableColumn.physical(name, dataType)); return this; } /** * Add a computed field which is generated by the given expression. This also defines the * field name and the data type. * * <p>The call order of this method determines the order of fields in the schema. * * <p>The returned expression should be a SQL-style expression whose identifiers should be * all quoted and expanded. * * <p>It should be expanded because this expression may be persisted then deserialized from * the catalog, an expanded identifier would avoid the ambiguity if there are same name UDF * referenced from different paths. For example, if there is a UDF named "my_udf" from path * "my_catalog.my_database", you could pass in an expression like * "`my_catalog`.`my_database`.`my_udf`(`f0`) + 1"; * * <p>It should be quoted because user could use a reserved keyword as the identifier, and * we have no idea if it is quoted when deserialize from the catalog, so we force to use * quoted identifier here. But framework will not check whether it is qualified and quoted * or not. * * @param name Field name * @param dataType Field data type * @param expression Computed column expression. */ public Builder field(String name, DataType dataType, String expression) { Preconditions.checkNotNull(name); Preconditions.checkNotNull(dataType); Preconditions.checkNotNull(expression); columns.add(TableColumn.computed(name, dataType, expression)); return this; } /** * Adds a {@link TableColumn} to this builder. 
* * <p>The call order of this method determines the order of fields in the schema. */ public Builder add(TableColumn column) { columns.add(column); return this; } /** * Add an array of fields with names and data types. * * <p>The call order of this method determines the order of fields in the schema. */ public Builder fields(String[] names, DataType[] dataTypes) { Preconditions.checkNotNull(names); Preconditions.checkNotNull(dataTypes); validateNameTypeNumberEqual(names, dataTypes); List<TableColumn> columns = IntStream.range(0, names.length) .mapToObj(idx -> TableColumn.physical(names[idx], dataTypes[idx])) .collect(Collectors.toList()); this.columns.addAll(columns); return this; } /** * @deprecated This method will be removed in future versions as it uses the old type * system. It is recommended to use {@link #field(String, DataType)} instead which uses * the new type system based on {@link DataTypes}. Please make sure to use either the * old or the new type system consistently to avoid unintended behavior. See the website * documentation for more information. */ @Deprecated public Builder field(String name, TypeInformation<?> typeInfo) { return field(name, fromLegacyInfoToDataType(typeInfo)); } /** * Specifies the previously defined field as an event-time attribute and specifies the * watermark strategy. * * @param rowtimeAttribute the field name as a rowtime attribute, can be a nested field * using dot separator. * @param watermarkExpressionString the string representation of watermark generation * expression, e.g. "ts - INTERVAL '5' SECOND". The string is a qualified SQL expression * string (UDFs are expanded) but will not be validated by {@link TableSchema}. * @param watermarkExprOutputType the data type of the computation result of watermark * generation expression. Whether the data type equals to the output type of expression * will also not be validated by {@link TableSchema}. 
*/ public Builder watermark( String rowtimeAttribute, String watermarkExpressionString, DataType watermarkExprOutputType) { Preconditions.checkNotNull(rowtimeAttribute); Preconditions.checkNotNull(watermarkExpressionString); Preconditions.checkNotNull(watermarkExprOutputType); if (!this.watermarkSpecs.isEmpty()) { throw new IllegalStateException( "Multiple watermark definition is not supported yet."); } this.watermarkSpecs.add( new WatermarkSpec( rowtimeAttribute, watermarkExpressionString, watermarkExprOutputType)); return this; } /** Adds the given {@link WatermarkSpec} to this builder. */ public Builder watermark(WatermarkSpec watermarkSpec) { if (!this.watermarkSpecs.isEmpty()) { throw new IllegalStateException( "Multiple watermark definition is not supported yet."); } this.watermarkSpecs.add(watermarkSpec); return this; } /** * Creates a primary key constraint for a set of given columns. The primary key is * informational only. It will not be enforced. It can be used for optimizations. It is the * owner's of the data responsibility to ensure uniqueness of the data. * * <p>The primary key will be assigned a random name. * * @param columns array of columns that form a unique primary key */ public Builder primaryKey(String... columns) { return primaryKey(UUID.randomUUID().toString(), columns); } /** * Creates a primary key constraint for a set of given columns. The primary key is * informational only. It will not be enforced. It can be used for optimizations. 
It is the * owner's of the data responsibility to ensure * * @param columns array of columns that form a unique primary key * @param name name for the primary key, can be used to reference the constraint */ public Builder primaryKey(String name, String[] columns) { if (this.primaryKey != null) { throw new ValidationException("Can not create multiple PRIMARY keys."); } if (StringUtils.isNullOrWhitespaceOnly(name)) { throw new ValidationException("PRIMARY KEY's name can not be null or empty."); } if (columns == null || columns.length == 0) { throw new ValidationException( "PRIMARY KEY constraint must be defined for at least a single column."); } this.primaryKey = UniqueConstraint.primaryKey(name, Arrays.asList(columns)); return this; } /** Returns a {@link TableSchema} instance. */ public TableSchema build() { validateColumnsAndWatermarkSpecs(this.columns, this.watermarkSpecs); if (primaryKey != null) { validatePrimaryKey(this.columns, primaryKey); } return new TableSchema(columns, watermarkSpecs, primaryKey); } } }
googleads/google-ads-java
35,696
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/AddOfflineUserDataJobOperationsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/offline_user_data_job_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * Response message for * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v19.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse} */ public final class AddOfflineUserDataJobOperationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse) AddOfflineUserDataJobOperationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use AddOfflineUserDataJobOperationsResponse.newBuilder() to construct. private AddOfflineUserDataJobOperationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AddOfflineUserDataJobOperationsResponse() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AddOfflineUserDataJobOperationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.class, 
com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.Builder.class); } private int bitField0_; public static final int PARTIAL_FAILURE_ERROR_FIELD_NUMBER = 1; private com.google.rpc.Status partialFailureError_; /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return Whether the partialFailureError field is set. */ @java.lang.Override public boolean hasPartialFailureError() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return The partialFailureError. */ @java.lang.Override public com.google.rpc.Status getPartialFailureError() { return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() { return partialFailureError_ == null ? 
com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } public static final int WARNING_FIELD_NUMBER = 2; private com.google.rpc.Status warning_; /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return Whether the warning field is set. */ @java.lang.Override public boolean hasWarning() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return The warning. */ @java.lang.Override public com.google.rpc.Status getWarning() { return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getWarningOrBuilder() { return warning_ == null ? 
com.google.rpc.Status.getDefaultInstance() : warning_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPartialFailureError()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getWarning()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getPartialFailureError()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getWarning()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse)) { return super.equals(obj); } com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse other = (com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse) obj; if (hasPartialFailureError() != other.hasPartialFailureError()) return false; if (hasPartialFailureError()) { if (!getPartialFailureError() .equals(other.getPartialFailureError())) return false; } if (hasWarning() != other.hasWarning()) return false; if (hasWarning()) { if (!getWarning() .equals(other.getWarning())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPartialFailureError()) { hash = (37 * hash) + PARTIAL_FAILURE_ERROR_FIELD_NUMBER; hash = (53 * hash) + getPartialFailureError().hashCode(); } if (hasWarning()) { hash = (37 * hash) + WARNING_FIELD_NUMBER; hash = (53 * hash) + getWarning().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder(com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v19.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse) com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.class, com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.Builder.class); } // Construct using com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.newBuilder() 
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPartialFailureErrorFieldBuilder(); getWarningFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; partialFailureError_ = null; if (partialFailureErrorBuilder_ != null) { partialFailureErrorBuilder_.dispose(); partialFailureErrorBuilder_ = null; } warning_ = null; if (warningBuilder_ != null) { warningBuilder_.dispose(); warningBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse build() { com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse buildPartial() { com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse result = new com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.partialFailureError_ = partialFailureErrorBuilder_ == null ? partialFailureError_ : partialFailureErrorBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.warning_ = warningBuilder_ == null ? warning_ : warningBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse) { return mergeFrom((com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse other) { if (other == 
com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance()) return this; if (other.hasPartialFailureError()) { mergePartialFailureError(other.getPartialFailureError()); } if (other.hasWarning()) { mergeWarning(other.getWarning()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPartialFailureErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getWarningFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.rpc.Status partialFailureError_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> partialFailureErrorBuilder_; /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. 
 * </pre>
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 * @return Whether the partialFailureError field is set.
 */
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point markers
// below) — change the .proto and regenerate instead of hand-editing.
public boolean hasPartialFailureError() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * Returns partial_failure_error, preferring the live nested builder's view when
 * one is active; returns the Status default instance when the field is unset.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 * @return The partialFailureError.
 */
public com.google.rpc.Status getPartialFailureError() {
  if (partialFailureErrorBuilder_ == null) {
    return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
  } else {
    return partialFailureErrorBuilder_.getMessage();
  }
}
/**
 * Sets partial_failure_error to {@code value}, marks the presence bit, and
 * notifies the parent of the change. Rejects null.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public Builder setPartialFailureError(com.google.rpc.Status value) {
  if (partialFailureErrorBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    partialFailureError_ = value;
  } else {
    partialFailureErrorBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Sets partial_failure_error from a builder (built immediately).
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public Builder setPartialFailureError(
    com.google.rpc.Status.Builder builderForValue) {
  if (partialFailureErrorBuilder_ == null) {
    partialFailureError_ = builderForValue.build();
  } else {
    partialFailureErrorBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into partial_failure_error: field-merge when a non-default
 * message is already present, plain assignment otherwise.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public Builder mergePartialFailureError(com.google.rpc.Status value) {
  if (partialFailureErrorBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0) &&
      partialFailureError_ != null &&
      partialFailureError_ != com.google.rpc.Status.getDefaultInstance()) {
      getPartialFailureErrorBuilder().mergeFrom(value);
    } else {
      partialFailureError_ = value;
    }
  } else {
    partialFailureErrorBuilder_.mergeFrom(value);
  }
  if (partialFailureError_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
 * Clears partial_failure_error: drops the presence bit, the cached message, and
 * any nested builder.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public Builder clearPartialFailureError() {
  bitField0_ = (bitField0_ & ~0x00000001);
  partialFailureError_ = null;
  if (partialFailureErrorBuilder_ != null) {
    partialFailureErrorBuilder_.dispose();
    partialFailureErrorBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for partial_failure_error, marking the field present.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public com.google.rpc.Status.Builder getPartialFailureErrorBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getPartialFailureErrorFieldBuilder().getBuilder();
}
/**
 * Read-only view of partial_failure_error (builder-backed when one is active).
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
  if (partialFailureErrorBuilder_ != null) {
    return partialFailureErrorBuilder_.getMessageOrBuilder();
  } else {
    return partialFailureError_ == null ?
        com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
  }
}
/**
 * Lazily creates the single-field builder for partial_failure_error; once the
 * builder exists the plain {@code partialFailureError_} cache is nulled and the
 * builder becomes the source of truth.
 *
 * <code>.google.rpc.Status partial_failure_error = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
    getPartialFailureErrorFieldBuilder() {
  if (partialFailureErrorBuilder_ == null) {
    partialFailureErrorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
            getPartialFailureError(),
            getParentForChildren(),
            isClean());
    partialFailureError_ = null;
  }
  return partialFailureErrorBuilder_;
}

// Field 2 (warning): same message/builder dual-representation pattern as
// partial_failure_error above, tracked by presence bit 0x00000002.
private com.google.rpc.Status warning_;
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> warningBuilder_;
/**
 * <code>.google.rpc.Status warning = 2;</code>
 * @return Whether the warning field is set.
 */
public boolean hasWarning() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Returns warning, preferring the live nested builder's view; Status default
 * instance when unset.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 * @return The warning.
 */
public com.google.rpc.Status getWarning() {
  if (warningBuilder_ == null) {
    return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_;
  } else {
    return warningBuilder_.getMessage();
  }
}
/**
 * Sets warning to {@code value}; rejects null.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public Builder setWarning(com.google.rpc.Status value) {
  if (warningBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    warning_ = value;
  } else {
    warningBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Sets warning from a builder (built immediately).
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public Builder setWarning(
    com.google.rpc.Status.Builder builderForValue) {
  if (warningBuilder_ == null) {
    warning_ = builderForValue.build();
  } else {
    warningBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into warning: field-merge when a non-default message is
 * already present, plain assignment otherwise.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public Builder mergeWarning(com.google.rpc.Status value) {
  if (warningBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0) &&
      warning_ != null &&
      warning_ != com.google.rpc.Status.getDefaultInstance()) {
      getWarningBuilder().mergeFrom(value);
    } else {
      warning_ = value;
    }
  } else {
    warningBuilder_.mergeFrom(value);
  }
  if (warning_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 * Clears warning: drops the presence bit, the cached message, and any nested
 * builder.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public Builder clearWarning() {
  bitField0_ = (bitField0_ & ~0x00000002);
  warning_ = null;
  if (warningBuilder_ != null) {
    warningBuilder_.dispose();
    warningBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for warning, marking the field present.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public com.google.rpc.Status.Builder getWarningBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getWarningFieldBuilder().getBuilder();
}
/**
 * Read-only view of warning (builder-backed when one is active).
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
public com.google.rpc.StatusOrBuilder getWarningOrBuilder() {
  if (warningBuilder_ != null) {
    return warningBuilder_.getMessageOrBuilder();
  } else {
    return warning_ == null ?
        com.google.rpc.Status.getDefaultInstance() : warning_;
  }
}
/**
 * Lazily creates the single-field builder for warning; once the builder exists
 * the plain {@code warning_} cache is nulled.
 *
 * <code>.google.rpc.Status warning = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
    getWarningFieldBuilder() {
  if (warningBuilder_ == null) {
    warningBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
            getWarning(),
            getParentForChildren(),
            isClean());
    warning_ = null;
  }
  return warningBuilder_;
}
// Unknown-field handling is delegated unchanged to the superclass.
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse)
}

// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse)
// Singleton default instance shared by all callers; also used by the parser.
private static final com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse();
}

public static com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

/**
 * Stream parser: delegates to {@code Builder.mergeFrom} and re-wraps I/O and
 * uninitialized-message failures as InvalidProtocolBufferException, attaching
 * the partially-built message to each.
 */
private static final com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse>
    PARSER = new com.google.protobuf.AbstractParser<AddOfflineUserDataJobOperationsResponse>() {
  @java.lang.Override
  public AddOfflineUserDataJobOperationsResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (com.google.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.ads.googleads.v19.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
googleads/google-ads-java
35,696
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/AddOfflineUserDataJobOperationsResponse.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/services/offline_user_data_job_service.proto
// Protobuf Java Version: 3.25.7
// NOTE(review): machine-generated file — change the .proto and regenerate
// rather than hand-editing.

package com.google.ads.googleads.v20.services;

/**
 * <pre>
 * Response message for
 * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v20.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse}
 */
public final class AddOfflineUserDataJobOperationsResponse extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse)
    AddOfflineUserDataJobOperationsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AddOfflineUserDataJobOperationsResponse.newBuilder() to construct.
  private AddOfflineUserDataJobOperationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AddOfflineUserDataJobOperationsResponse() {
  }

  // Reflection hook used by protobuf internals to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AddOfflineUserDataJobOperationsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_AddOfflineUserDataJobOperationsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.class, com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.Builder.class);
  }

  // Presence bits: 0x1 = partial_failure_error, 0x2 = warning.
  private int bitField0_;
  public static final int PARTIAL_FAILURE_ERROR_FIELD_NUMBER = 1;
  private com.google.rpc.Status partialFailureError_;
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (for example, auth
   * errors), we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 1;</code>
   * @return Whether the partialFailureError field is set.
   */
  @java.lang.Override
  public boolean hasPartialFailureError() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <code>.google.rpc.Status partial_failure_error = 1;</code>
   * @return The partialFailureError, or the Status default instance when unset.
   */
  @java.lang.Override
  public com.google.rpc.Status getPartialFailureError() {
    return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
  }
  /**
   * Read-only view of partial_failure_error; same fallback as the getter.
   *
   * <code>.google.rpc.Status partial_failure_error = 1;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
    return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
  }

  public static final int WARNING_FIELD_NUMBER = 2;
  private com.google.rpc.Status warning_;
  /**
   * <pre>
   * Non blocking errors that pertain to operation failures in the warnings
   * mode. Returned only when enable_warnings = true.
   * </pre>
   *
   * <code>.google.rpc.Status warning = 2;</code>
   * @return Whether the warning field is set.
   */
  @java.lang.Override
  public boolean hasWarning() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <code>.google.rpc.Status warning = 2;</code>
   * @return The warning, or the Status default instance when unset.
   */
  @java.lang.Override
  public com.google.rpc.Status getWarning() {
    return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_;
  }
  /**
   * Read-only view of warning; same fallback as the getter.
   *
   * <code>.google.rpc.Status warning = 2;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getWarningOrBuilder() {
    return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_;
  }

  // Memoized tri-state: -1 unknown, 0 false, 1 true. Always true here (no
  // required fields), so it memoizes 1 on first call.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only the fields whose presence bits are set, then trailing
  // unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getPartialFailureError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getWarning());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the wire size (−1 sentinel means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, getPartialFailureError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getWarning());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over presence bits, both message fields, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse other = (com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse) obj;

    if (hasPartialFailureError() != other.hasPartialFailureError()) return false;
    if (hasPartialFailureError()) {
      if (!getPartialFailureError()
          .equals(other.getPartialFailureError())) return false;
    }
    if (hasWarning() != other.hasWarning()) return false;
    if (hasWarning()) {
      if (!getWarning()
          .equals(other.getWarning())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasPartialFailureError()) {
      hash = (37 * hash) + PARTIAL_FAILURE_ERROR_FIELD_NUMBER;
      hash = (53 * hash) + getPartialFailureError().hashCode();
    }
    if (hasWarning()) {
      hash = (37 * hash) + WARNING_FIELD_NUMBER;
      hash = (53 * hash) + getWarning().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard protoc parse entry points for every supported input form.
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Response message for
   * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v20.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse)
      com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_AddOfflineUserDataJobOperationsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.class, com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.Builder.class);
    }

    // Construct using com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requires them
    // (reflection-based access paths).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getPartialFailureErrorFieldBuilder();
        getWarningFieldBuilder();
      }
    }
    // Resets both fields and their presence bits, disposing nested builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      partialFailureError_ = null;
      if (partialFailureErrorBuilder_ != null) {
        partialFailureErrorBuilder_.dispose();
        partialFailureErrorBuilder_ = null;
      }
      warning_ = null;
      if (warningBuilder_ != null) {
        warningBuilder_.dispose();
        warningBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_AddOfflineUserDataJobOperationsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse build() {
      com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse buildPartial() {
      com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse result = new com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Copies set fields (from either the cached message or its live builder)
    // into the result and transfers the corresponding presence bits.
    private void buildPartial0(com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.partialFailureError_ = partialFailureErrorBuilder_ == null
            ? partialFailureError_
            : partialFailureErrorBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.warning_ = warningBuilder_ == null
            ? warning_
            : warningBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    // Generic reflection-style mutators all delegate to the superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse) {
        return mergeFrom((com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only the fields that are set on {@code other}; no-op for the
    // default instance.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse other) {
      if (other == com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance()) return this;
      if (other.hasPartialFailureError()) {
        mergePartialFailureError(other.getPartialFailureError());
      }
      if (other.hasWarning()) {
        mergeWarning(other.getWarning());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: tag 10 = field 1 (partial_failure_error),
    // tag 18 = field 2 (warning); anything else goes to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              input.readMessage(
                  getPartialFailureErrorFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              input.readMessage(
                  getWarningFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits: 0x1 = partial_failure_error, 0x2 = warning.
    private int bitField0_;

    private com.google.rpc.Status partialFailureError_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> partialFailureErrorBuilder_;
    /**
     * <pre>
     * Errors that pertain to operation failures in the partial failure mode.
     * Returned only when partial_failure = true and all errors occur inside the
     * operations. If any errors occur outside the operations (for example, auth
     * errors), we return an RPC level error.
     * </pre>
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     * @return Whether the partialFailureError field is set.
     */
    public boolean hasPartialFailureError() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * Returns partial_failure_error, preferring the live nested builder's view;
     * Status default instance when unset.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     * @return The partialFailureError.
     */
    public com.google.rpc.Status getPartialFailureError() {
      if (partialFailureErrorBuilder_ == null) {
        return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
      } else {
        return partialFailureErrorBuilder_.getMessage();
      }
    }
    /**
     * Sets partial_failure_error to {@code value} and marks it present.
     * Rejects null.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public Builder setPartialFailureError(com.google.rpc.Status value) {
      if (partialFailureErrorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        partialFailureError_ = value;
      } else {
        partialFailureErrorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Sets partial_failure_error from a builder (built immediately).
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public Builder setPartialFailureError(
        com.google.rpc.Status.Builder builderForValue) {
      if (partialFailureErrorBuilder_ == null) {
        partialFailureError_ = builderForValue.build();
      } else {
        partialFailureErrorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into partial_failure_error: field-merge when a
     * non-default message is already present, plain assignment otherwise.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public Builder mergePartialFailureError(com.google.rpc.Status value) {
      if (partialFailureErrorBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0) &&
          partialFailureError_ != null &&
          partialFailureError_ != com.google.rpc.Status.getDefaultInstance()) {
          getPartialFailureErrorBuilder().mergeFrom(value);
        } else {
          partialFailureError_ = value;
        }
      } else {
        partialFailureErrorBuilder_.mergeFrom(value);
      }
      if (partialFailureError_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     * Clears partial_failure_error: presence bit, cached message, and nested
     * builder.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public Builder clearPartialFailureError() {
      bitField0_ = (bitField0_ & ~0x00000001);
      partialFailureError_ = null;
      if (partialFailureErrorBuilder_ != null) {
        partialFailureErrorBuilder_.dispose();
        partialFailureErrorBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a mutable builder for partial_failure_error, marking it present.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public com.google.rpc.Status.Builder getPartialFailureErrorBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getPartialFailureErrorFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of partial_failure_error (builder-backed when active).
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
      if (partialFailureErrorBuilder_ != null) {
        return partialFailureErrorBuilder_.getMessageOrBuilder();
      } else {
        return partialFailureError_ == null ?
            com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
      }
    }
    /**
     * Lazily creates the single-field builder for partial_failure_error; after
     * creation the plain {@code partialFailureError_} cache is nulled.
     *
     * <code>.google.rpc.Status partial_failure_error = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getPartialFailureErrorFieldBuilder() {
      if (partialFailureErrorBuilder_ == null) {
        partialFailureErrorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
                getPartialFailureError(),
                getParentForChildren(),
                isClean());
        partialFailureError_ = null;
      }
      return partialFailureErrorBuilder_;
    }

    // Field 2 (warning): same dual message/builder representation, bit 0x2.
    private com.google.rpc.Status warning_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> warningBuilder_;
    /**
     * <pre>
     * Non blocking errors that pertain to operation failures in the warnings
     * mode. Returned only when enable_warnings = true.
     * </pre>
     *
     * <code>.google.rpc.Status warning = 2;</code>
     * @return Whether the warning field is set.
     */
    public boolean hasWarning() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * Returns warning, preferring the live nested builder's view; Status
     * default instance when unset.
     *
     * <code>.google.rpc.Status warning = 2;</code>
     * @return The warning.
     */
    public com.google.rpc.Status getWarning() {
      if (warningBuilder_ == null) {
        return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_;
      } else {
        return warningBuilder_.getMessage();
      }
    }
    /**
     * Sets warning to {@code value} and marks it present. Rejects null.
     *
     * <code>.google.rpc.Status warning = 2;</code>
     */
    public Builder setWarning(com.google.rpc.Status value) {
      if (warningBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        warning_ = value;
      } else {
        warningBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Sets warning from a builder (built immediately).
     *
     * <code>.google.rpc.Status warning = 2;</code>
     */
    public Builder setWarning(
        com.google.rpc.Status.Builder builderForValue) {
      if (warningBuilder_ == null) {
        warning_ = builderForValue.build();
      } else {
        warningBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into warning: field-merge when a non-default message
     * is already present, plain assignment otherwise.
     *
     * <code>.google.rpc.Status warning = 2;</code>
     */
    public Builder mergeWarning(com.google.rpc.Status value) {
      if (warningBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0) &&
          warning_ != null &&
          warning_ != com.google.rpc.Status.getDefaultInstance()) {
          getWarningBuilder().mergeFrom(value);
        } else {
          warning_ = value;
        }
      } else {
        warningBuilder_.mergeFrom(value);
      }
      if (warning_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     * Clears warning: presence bit, cached message, and nested builder.
     *
     * <code>.google.rpc.Status warning = 2;</code>
     */
    public Builder clearWarning() {
      bitField0_ = (bitField0_ & ~0x00000002);
      warning_ = null;
      if (warningBuilder_ != null) {
        warningBuilder_.dispose();
        warningBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Non blocking errors that pertain to operation failures in the warnings
     * mode. Returned only when enable_warnings = true.
* </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public com.google.rpc.Status.Builder getWarningBuilder() { bitField0_ |= 0x00000002; onChanged(); return getWarningFieldBuilder().getBuilder(); } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public com.google.rpc.StatusOrBuilder getWarningOrBuilder() { if (warningBuilder_ != null) { return warningBuilder_.getMessageOrBuilder(); } else { return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_; } } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getWarningFieldBuilder() { if (warningBuilder_ == null) { warningBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>( getWarning(), getParentForChildren(), isClean()); warning_ = null; } return warningBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse) private static final com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse(); } public static com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> PARSER = new com.google.protobuf.AbstractParser<AddOfflineUserDataJobOperationsResponse>() { @java.lang.Override public AddOfflineUserDataJobOperationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,696
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/AddOfflineUserDataJobOperationsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/offline_user_data_job_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * Response message for * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v21.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations]. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse} */ public final class AddOfflineUserDataJobOperationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) AddOfflineUserDataJobOperationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use AddOfflineUserDataJobOperationsResponse.newBuilder() to construct. private AddOfflineUserDataJobOperationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AddOfflineUserDataJobOperationsResponse() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AddOfflineUserDataJobOperationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.class, 
com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.Builder.class); } private int bitField0_; public static final int PARTIAL_FAILURE_ERROR_FIELD_NUMBER = 1; private com.google.rpc.Status partialFailureError_; /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return Whether the partialFailureError field is set. */ @java.lang.Override public boolean hasPartialFailureError() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return The partialFailureError. */ @java.lang.Override public com.google.rpc.Status getPartialFailureError() { return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() { return partialFailureError_ == null ? 
com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } public static final int WARNING_FIELD_NUMBER = 2; private com.google.rpc.Status warning_; /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return Whether the warning field is set. */ @java.lang.Override public boolean hasWarning() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return The warning. */ @java.lang.Override public com.google.rpc.Status getWarning() { return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getWarningOrBuilder() { return warning_ == null ? 
com.google.rpc.Status.getDefaultInstance() : warning_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPartialFailureError()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getWarning()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getPartialFailureError()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getWarning()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse)) { return super.equals(obj); } com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse other = (com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) obj; if (hasPartialFailureError() != other.hasPartialFailureError()) return false; if (hasPartialFailureError()) { if (!getPartialFailureError() .equals(other.getPartialFailureError())) return false; } if (hasWarning() != other.hasWarning()) return false; if (hasWarning()) { if (!getWarning() .equals(other.getWarning())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPartialFailureError()) { hash = (37 * hash) + PARTIAL_FAILURE_ERROR_FIELD_NUMBER; hash = (53 * hash) + getPartialFailureError().hashCode(); } if (hasWarning()) { hash = (37 * hash) + WARNING_FIELD_NUMBER; hash = (53 * hash) + getWarning().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder(com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [OfflineUserDataJobService.AddOfflineUserDataJobOperations][google.ads.googleads.v21.services.OfflineUserDataJobService.AddOfflineUserDataJobOperations]. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_AddOfflineUserDataJobOperationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.class, com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.Builder.class); } // Construct using com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.newBuilder() 
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPartialFailureErrorFieldBuilder(); getWarningFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; partialFailureError_ = null; if (partialFailureErrorBuilder_ != null) { partialFailureErrorBuilder_.dispose(); partialFailureErrorBuilder_ = null; } warning_ = null; if (warningBuilder_ != null) { warningBuilder_.dispose(); warningBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_AddOfflineUserDataJobOperationsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse build() { com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse buildPartial() { com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse result = new com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.partialFailureError_ = partialFailureErrorBuilder_ == null ? partialFailureError_ : partialFailureErrorBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.warning_ = warningBuilder_ == null ? warning_ : warningBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) { return mergeFrom((com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse other) { if (other == 
com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse.getDefaultInstance()) return this; if (other.hasPartialFailureError()) { mergePartialFailureError(other.getPartialFailureError()); } if (other.hasWarning()) { mergeWarning(other.getWarning()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPartialFailureErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getWarningFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.rpc.Status partialFailureError_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> partialFailureErrorBuilder_; /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. 
* </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return Whether the partialFailureError field is set. */ public boolean hasPartialFailureError() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> * @return The partialFailureError. */ public com.google.rpc.Status getPartialFailureError() { if (partialFailureErrorBuilder_ == null) { return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } else { return partialFailureErrorBuilder_.getMessage(); } } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public Builder setPartialFailureError(com.google.rpc.Status value) { if (partialFailureErrorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } partialFailureError_ = value; } else { partialFailureErrorBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. 
* </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public Builder setPartialFailureError( com.google.rpc.Status.Builder builderForValue) { if (partialFailureErrorBuilder_ == null) { partialFailureError_ = builderForValue.build(); } else { partialFailureErrorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public Builder mergePartialFailureError(com.google.rpc.Status value) { if (partialFailureErrorBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && partialFailureError_ != null && partialFailureError_ != com.google.rpc.Status.getDefaultInstance()) { getPartialFailureErrorBuilder().mergeFrom(value); } else { partialFailureError_ = value; } } else { partialFailureErrorBuilder_.mergeFrom(value); } if (partialFailureError_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public Builder clearPartialFailureError() { bitField0_ = (bitField0_ & ~0x00000001); partialFailureError_ = null; if (partialFailureErrorBuilder_ != null) { partialFailureErrorBuilder_.dispose(); partialFailureErrorBuilder_ = null; } onChanged(); return this; } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. 
* Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public com.google.rpc.Status.Builder getPartialFailureErrorBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPartialFailureErrorFieldBuilder().getBuilder(); } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. * </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() { if (partialFailureErrorBuilder_ != null) { return partialFailureErrorBuilder_.getMessageOrBuilder(); } else { return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_; } } /** * <pre> * Errors that pertain to operation failures in the partial failure mode. * Returned only when partial_failure = true and all errors occur inside the * operations. If any errors occur outside the operations (for example, auth * errors), we return an RPC level error. 
* </pre> * * <code>.google.rpc.Status partial_failure_error = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getPartialFailureErrorFieldBuilder() { if (partialFailureErrorBuilder_ == null) { partialFailureErrorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>( getPartialFailureError(), getParentForChildren(), isClean()); partialFailureError_ = null; } return partialFailureErrorBuilder_; } private com.google.rpc.Status warning_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> warningBuilder_; /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return Whether the warning field is set. */ public boolean hasWarning() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> * @return The warning. */ public com.google.rpc.Status getWarning() { if (warningBuilder_ == null) { return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_; } else { return warningBuilder_.getMessage(); } } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. 
* </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public Builder setWarning(com.google.rpc.Status value) { if (warningBuilder_ == null) { if (value == null) { throw new NullPointerException(); } warning_ = value; } else { warningBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public Builder setWarning( com.google.rpc.Status.Builder builderForValue) { if (warningBuilder_ == null) { warning_ = builderForValue.build(); } else { warningBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public Builder mergeWarning(com.google.rpc.Status value) { if (warningBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && warning_ != null && warning_ != com.google.rpc.Status.getDefaultInstance()) { getWarningBuilder().mergeFrom(value); } else { warning_ = value; } } else { warningBuilder_.mergeFrom(value); } if (warning_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public Builder clearWarning() { bitField0_ = (bitField0_ & ~0x00000002); warning_ = null; if (warningBuilder_ != null) { warningBuilder_.dispose(); warningBuilder_ = null; } onChanged(); return this; } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. 
* </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public com.google.rpc.Status.Builder getWarningBuilder() { bitField0_ |= 0x00000002; onChanged(); return getWarningFieldBuilder().getBuilder(); } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ public com.google.rpc.StatusOrBuilder getWarningOrBuilder() { if (warningBuilder_ != null) { return warningBuilder_.getMessageOrBuilder(); } else { return warning_ == null ? com.google.rpc.Status.getDefaultInstance() : warning_; } } /** * <pre> * Non blocking errors that pertain to operation failures in the warnings * mode. Returned only when enable_warnings = true. * </pre> * * <code>.google.rpc.Status warning = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getWarningFieldBuilder() { if (warningBuilder_ == null) { warningBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>( getWarning(), getParentForChildren(), isClean()); warning_ = null; } return warningBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse) private static final com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse(); } public static com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> PARSER = new com.google.protobuf.AbstractParser<AddOfflineUserDataJobOperationsResponse>() { @java.lang.Override public AddOfflineUserDataJobOperationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AddOfflineUserDataJobOperationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.services.AddOfflineUserDataJobOperationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,361
java-network-management/proto-google-cloud-network-management-v1/src/main/java/com/google/cloud/networkmanagement/v1/CloudRunRevisionInfo.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/networkmanagement/v1/trace.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkmanagement.v1;

/**
 * For display only. Metadata associated with a Cloud Run revision.
 *
 * <p>Protobuf type {@code google.cloud.networkmanagement.v1.CloudRunRevisionInfo}
 *
 * <p>NOTE(review): protoc-generated code — do not hand-edit. Regenerate from
 * {@code google/cloud/networkmanagement/v1/trace.proto} instead. Immutable message with four
 * string fields: display_name (1), uri (2), location (4), service_uri (5).
 */
public final class CloudRunRevisionInfo extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1.CloudRunRevisionInfo)
    CloudRunRevisionInfoOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CloudRunRevisionInfo.newBuilder() to construct.
  private CloudRunRevisionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: all string fields start as the empty string (proto3 default).
  private CloudRunRevisionInfo() {
    displayName_ = "";
    uri_ = "";
    location_ = "";
    serviceUri_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CloudRunRevisionInfo();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkmanagement.v1.TraceProto
        .internal_static_google_cloud_networkmanagement_v1_CloudRunRevisionInfo_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkmanagement.v1.TraceProto
        .internal_static_google_cloud_networkmanagement_v1_CloudRunRevisionInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.class,
            com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.Builder.class);
  }

  public static final int DISPLAY_NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily and cached as String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object displayName_ = "";

  /**
   * Name of a Cloud Run revision.
   *
   * <p>{@code string display_name = 1;}
   *
   * @return The displayName.
   */
  @java.lang.Override
  public java.lang.String getDisplayName() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      displayName_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@code display_name = 1}; caches the ByteString form.
   *
   * @return The bytes for displayName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDisplayNameBytes() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      displayName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int URI_FIELD_NUMBER = 2;

  // String/ByteString dual representation, same lazy-decode pattern as displayName_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object uri_ = "";

  /**
   * URI of a Cloud Run revision.
   *
   * <p>{@code string uri = 2;}
   *
   * @return The uri.
   */
  @java.lang.Override
  public java.lang.String getUri() {
    java.lang.Object ref = uri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      uri_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@code uri = 2}.
   *
   * @return The bytes for uri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getUriBytes() {
    java.lang.Object ref = uri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      uri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LOCATION_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object location_ = "";

  /**
   * Location in which this revision is deployed.
   *
   * <p>{@code string location = 4;}
   *
   * @return The location.
   */
  @java.lang.Override
  public java.lang.String getLocation() {
    java.lang.Object ref = location_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      location_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@code location = 4}.
   *
   * @return The bytes for location.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLocationBytes() {
    java.lang.Object ref = location_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      location_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int SERVICE_URI_FIELD_NUMBER = 5;

  @SuppressWarnings("serial")
  private volatile java.lang.Object serviceUri_ = "";

  /**
   * URI of Cloud Run service this revision belongs to.
   *
   * <p>{@code string service_uri = 5;}
   *
   * @return The serviceUri.
   */
  @java.lang.Override
  public java.lang.String getServiceUri() {
    java.lang.Object ref = serviceUri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      serviceUri_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@code service_uri = 5}.
   *
   * @return The bytes for serviceUri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getServiceUriBytes() {
    java.lang.Object ref = serviceUri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      serviceUri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in proto3, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-empty fields (proto3 default values are omitted on the wire).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, location_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, serviceUri_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, location_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, serviceUri_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo)) {
      return super.equals(obj);
    }
    com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo other =
        (com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo) obj;

    if (!getDisplayName().equals(other.getDisplayName())) return false;
    if (!getUri().equals(other.getUri())) return false;
    if (!getLocation().equals(other.getLocation())) return false;
    if (!getServiceUri().equals(other.getServiceUri())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash mixes field numbers and values; memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDisplayName().hashCode();
    hash = (37 * hash) + URI_FIELD_NUMBER;
    hash = (53 * hash) + getUri().hashCode();
    hash = (37 * hash) + LOCATION_FIELD_NUMBER;
    hash = (53 * hash) + getLocation().hashCode();
    hash = (37 * hash) + SERVICE_URI_FIELD_NUMBER;
    hash = (53 * hash) + getServiceUri().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard generated parse entry points (delegate to PARSER) ----

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Fresh builder for the default instance; otherwise copy this message's state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * For display only. Metadata associated with a Cloud Run revision.
   *
   * <p>Protobuf type {@code google.cloud.networkmanagement.v1.CloudRunRevisionInfo}
   *
   * <p>Mutable builder; bitField0_ tracks which of the four fields have been set
   * (0x1 display_name, 0x2 uri, 0x4 location, 0x8 service_uri).
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1.CloudRunRevisionInfo)
      com.google.cloud.networkmanagement.v1.CloudRunRevisionInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_CloudRunRevisionInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_CloudRunRevisionInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.class,
              com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.Builder.class);
    }

    // Construct using com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      displayName_ = "";
      uri_ = "";
      location_ = "";
      serviceUri_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_CloudRunRevisionInfo_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo getDefaultInstanceForType() {
      return com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo build() {
      com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo buildPartial() {
      com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo result =
          new com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose bits are set from builder to result.
    private void buildPartial0(com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.displayName_ = displayName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.uri_ = uri_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.location_ = location_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.serviceUri_ = serviceUri_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo) {
        return mergeFrom((com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only non-empty string fields from `other` overwrite.
    public Builder mergeFrom(com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo other) {
      if (other == com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo.getDefaultInstance())
        return this;
      if (!other.getDisplayName().isEmpty()) {
        displayName_ = other.displayName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getUri().isEmpty()) {
        uri_ = other.uri_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getLocation().isEmpty()) {
        location_ = other.location_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getServiceUri().isEmpty()) {
        serviceUri_ = other.serviceUri_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop; tags 10/18/34/42 are the length-delimited encodings of
    // fields 1, 2, 4, and 5. Unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                displayName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                uri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 34:
              {
                location_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 34
            case 42:
              {
                serviceUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object displayName_ = "";

    /**
     * Name of a Cloud Run revision. {@code string display_name = 1;}
     *
     * @return The displayName.
     */
    public java.lang.String getDisplayName() {
      java.lang.Object ref = displayName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        displayName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@code display_name = 1}.
     *
     * @return The bytes for displayName.
     */
    public com.google.protobuf.ByteString getDisplayNameBytes() {
      java.lang.Object ref = displayName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        displayName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets {@code display_name = 1}; rejects null.
     *
     * @param value The displayName to set.
     * @return This builder for chaining.
     */
    public Builder setDisplayName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      displayName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Resets {@code display_name = 1} to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearDisplayName() {
      displayName_ = getDefaultInstance().getDisplayName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets {@code display_name = 1} from UTF-8 bytes; validates the encoding.
     *
     * @param value The bytes for displayName to set.
     * @return This builder for chaining.
     */
    public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      displayName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object uri_ = "";

    /**
     * URI of a Cloud Run revision. {@code string uri = 2;}
     *
     * @return The uri.
     */
    public java.lang.String getUri() {
      java.lang.Object ref = uri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        uri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@code uri = 2}.
     *
     * @return The bytes for uri.
     */
    public com.google.protobuf.ByteString getUriBytes() {
      java.lang.Object ref = uri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        uri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets {@code uri = 2}; rejects null.
     *
     * @param value The uri to set.
     * @return This builder for chaining.
     */
    public Builder setUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      uri_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets {@code uri = 2} to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearUri() {
      uri_ = getDefaultInstance().getUri();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets {@code uri = 2} from UTF-8 bytes; validates the encoding.
     *
     * @param value The bytes for uri to set.
     * @return This builder for chaining.
     */
    public Builder setUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uri_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object location_ = "";

    /**
     * Location in which this revision is deployed. {@code string location = 4;}
     *
     * @return The location.
     */
    public java.lang.String getLocation() {
      java.lang.Object ref = location_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        location_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@code location = 4}.
     *
     * @return The bytes for location.
     */
    public com.google.protobuf.ByteString getLocationBytes() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        location_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets {@code location = 4}; rejects null.
     *
     * @param value The location to set.
     * @return This builder for chaining.
     */
    public Builder setLocation(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      location_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Resets {@code location = 4} to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearLocation() {
      location_ = getDefaultInstance().getLocation();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * Sets {@code location = 4} from UTF-8 bytes; validates the encoding.
     *
     * @param value The bytes for location to set.
     * @return This builder for chaining.
     */
    public Builder setLocationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      location_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object serviceUri_ = "";

    /**
     * URI of Cloud Run service this revision belongs to. {@code string service_uri = 5;}
     *
     * @return The serviceUri.
     */
    public java.lang.String getServiceUri() {
      java.lang.Object ref = serviceUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        serviceUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@code service_uri = 5}.
     *
     * @return The bytes for serviceUri.
     */
    public com.google.protobuf.ByteString getServiceUriBytes() {
      java.lang.Object ref = serviceUri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        serviceUri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets {@code service_uri = 5}; rejects null.
     *
     * @param value The serviceUri to set.
     * @return This builder for chaining.
     */
    public Builder setServiceUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      serviceUri_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * Resets {@code service_uri = 5} to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearServiceUri() {
      serviceUri_ = getDefaultInstance().getServiceUri();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     * Sets {@code service_uri = 5} from UTF-8 bytes; validates the encoding.
     *
     * @param value The bytes for serviceUri to set.
     * @return This builder for chaining.
     */
    public Builder setServiceUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      serviceUri_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1.CloudRunRevisionInfo)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1.CloudRunRevisionInfo)
  private static final com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo();
  }

  public static com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stream parser used by all parseFrom overloads; converts IO failures into
  // InvalidProtocolBufferException with the partially-built message attached.
  private static final com.google.protobuf.Parser<CloudRunRevisionInfo> PARSER =
      new com.google.protobuf.AbstractParser<CloudRunRevisionInfo>() {
        @java.lang.Override
        public CloudRunRevisionInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CloudRunRevisionInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CloudRunRevisionInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.networkmanagement.v1.CloudRunRevisionInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/sis
35,768
endorsed/src/org.apache.sis.storage/main/org/apache/sis/storage/image/WorldFileStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.storage.image; import java.util.Arrays; import java.util.Collection; import java.util.Map; import java.util.HashMap; import java.util.Optional; import java.io.Closeable; import java.io.IOException; import java.io.EOFException; import java.io.FileNotFoundException; import java.io.UncheckedIOException; import java.nio.file.Path; import java.nio.file.NoSuchFileException; import java.nio.file.StandardOpenOption; import java.net.URISyntaxException; import javax.imageio.ImageIO; import javax.imageio.ImageReader; import javax.imageio.spi.ImageReaderSpi; import javax.imageio.stream.ImageInputStream; import org.opengis.metadata.Metadata; import org.opengis.metadata.maintenance.ScopeCode; import org.apache.sis.coverage.grid.PixelInCell; import org.apache.sis.coverage.grid.GridExtent; import org.apache.sis.coverage.grid.GridGeometry; import org.apache.sis.io.stream.IOUtilities; import org.apache.sis.storage.Resource; import org.apache.sis.storage.Aggregate; import org.apache.sis.storage.StorageConnector; import org.apache.sis.storage.GridCoverageResource; import org.apache.sis.storage.DataStoreException; import org.apache.sis.storage.DataStoreClosedException; import 
org.apache.sis.storage.DataStoreContentException; import org.apache.sis.storage.ReadOnlyStorageException; import org.apache.sis.storage.UnsupportedStorageException; import org.apache.sis.storage.modifier.CoverageModifier; import org.apache.sis.storage.base.PRJDataStore; import org.apache.sis.storage.base.MetadataBuilder; import org.apache.sis.storage.base.AuxiliaryContent; import org.apache.sis.referencing.internal.shared.AffineTransform2D; import org.apache.sis.util.CharSequences; import org.apache.sis.util.ArraysExt; import org.apache.sis.util.internal.shared.ListOfUnknownSize; import org.apache.sis.util.collection.BackingStoreException; import org.apache.sis.util.resources.Errors; import org.apache.sis.setup.OptionKey; /** * A data store which creates grid coverages from Image I/O readers using <i>World File</i> convention. * Georeferencing is defined by two auxiliary files having the same name as the image file but different suffixes: * * <ul class="verbose"> * <li>A text file containing the coefficients of the affine transform mapping pixel coordinates to geodesic coordinates. * The reader expects one coefficient per line, in the same order as the order expected by the * {@link java.awt.geom.AffineTransform#AffineTransform(double[]) AffineTransform(double[])} constructor, which is * <var>scaleX</var>, <var>shearY</var>, <var>shearX</var>, <var>scaleY</var>, <var>translateX</var>, <var>translateY</var>. * The reader looks for a file having the following suffixes, in preference order: * <ol> * <li>The first letter of the image file extension, followed by the last letter of * the image file extension, followed by {@code 'w'}. 
Example: {@code "tfw"} for * {@code "tiff"} images, and {@code "jgw"} for {@code "jpeg"} images.</li> * <li>The extension of the image file with a {@code 'w'} appended.</li> * <li>The {@code "wld"} extension.</li> * </ol> * </li> * <li>A text file containing the <i>Coordinate Reference System</i> (CRS) definition * in <i>Well Known Text</i> (WKT) syntax. * The reader looks for a file having the {@code ".prj"} extension.</li> * </ul> * * Every auxiliary text file are expected to be encoded in UTF-8 * and every numbers are expected to be formatted in US locale. * * <h2>Type of input objects</h2> * The {@link StorageConnector} input should be an instance of the following types: * {@link java.nio.file.Path}, {@link java.io.File}, {@link java.net.URL} or {@link java.net.URI}. * Other types such as {@link ImageInputStream} are also accepted but in those cases the auxiliary files cannot be read. * For any input of unknown type, this data store first checks if an {@link ImageReader} accepts the input type directly. * If none is found, this data store tries to {@linkplain ImageIO#createImageInputStream(Object) create an input stream} * from the input object. * * <p>The storage input object may also be an {@link ImageReader} instance ready for use * (i.e. with its {@linkplain ImageReader#setInput(Object) input set} to a non-null value). * In that case, this data store will use the given image reader as-is. * The image reader will be {@linkplain ImageReader#dispose() disposed} * and its input closed (if {@link AutoCloseable}) when this data store is {@linkplain #close() closed}.</p> * * <h2>Handling of multi-image files</h2> * Because some image formats can store an arbitrary number of images, * this data store is considered as an aggregate with one resource per image. * All image should have the same size and all resources will share the same {@link GridGeometry}. 
* However, this base class does not implement the {@link Aggregate} interface directly in order to * give a chance to subclasses to implement {@link GridCoverageResource} directly when the format * is known to support only one image per file. * * @author Martin Desruisseaux (Geomatys) */ public class WorldFileStore extends PRJDataStore { /** * Image I/O format names (ignoring case) for which we have an entry in the {@code SpatialMetadata} database. * * @see <a href="https://issues.apache.org/jira/browse/SIS-300">SIS-300 — Complete the information provided in Citations constants</a> */ private static final String[] KNOWN_FORMATS = { "PNG" }; /** * Index of the main image. This is relevant only with formats capable to store an arbitrary number of images. * Current implementation assumes that the main image is always the first one, but it may become configurable * in a future version if useful. * * @see #width * @see #height */ static final int MAIN_IMAGE = 0; /** * The default World File suffix when it cannot be determined from {@link #location}. * This is a GDAL convention. */ private static final String DEFAULT_SUFFIX = "wld"; /** * The "cell center" versus "cell corner" interpretation of translation coefficients. * The ESRI specification said that the coefficients map to pixel center. */ static final PixelInCell CELL_ANCHOR = PixelInCell.CELL_CENTER; /** * The filename extension (may be an empty string), or {@code null} if unknown. * It does not include the leading dot. */ final String suffix; /** * The filename extension for the auxiliary "world file". * For the TIFF format, this is typically {@code "tfw"}. * This is computed as a side-effect of {@link #readWorldFile()}. */ private String suffixWLD; /** * The image reader, set by the constructor and cleared when the store is closed. * May also be null if the store is initially write-only, in which case a reader * may be created the first time than an image is read. 
* * @see #reader() */ private volatile ImageReader reader; /** * The object to close when {@code WorldFileStore} is closed. It may be a different object than * reader input or writer output, because some {@link ImageInputStream#close()} implementations * in the standard Java {@link javax.imageio.stream} package do not close the underlying stream. * * <p>The type is {@link Closeable} instead of {@link AutoCloseable} because the former is idempotent: * invoking {@link Closeable#close()} many times has no effect. By contrast {@link AutoCloseable} does * not offer this guarantee. Because it is hard to know what {@link ImageInputStream#close()} will do, * we need idempotent {@code toClose} for safety. Note that {@link ImageInputStream#close()} violates * the idempotent contract of {@link Closeable#close()}, so an additional check will be necessary in * our {@link #close()} implementation.</p> * * @see javax.imageio.stream.FileCacheImageInputStream#close() * @see javax.imageio.stream.FileCacheImageOutputStream#close() * @see javax.imageio.stream.MemoryCacheImageInputStream#close() * @see javax.imageio.stream.MemoryCacheImageOutputStream#close() */ private Closeable toClose; /** * Width and height of the main image. * The {@link #gridGeometry} is assumed valid only for images having this size. * * @see #MAIN_IMAGE * @see #gridGeometry */ private int width, height; /** * The conversion from pixel center to CRS, or {@code null} if none or not yet computed. * The grid extent has the size given by {@link #width} and {@link #height}. * * @see #crs * @see #width * @see #height * @see #getGridGeometry(int) */ private GridGeometry gridGeometry; /** * All images in this resource, created when first needed. * Elements in this list will also be created when first needed. * * @see #components() */ private Components components; /** * The metadata object, or {@code null} if not yet created. * * @see #getMetadata() */ private Metadata metadata; /** * Identifiers used by a resource. 
Identifiers must be unique in the data store, * so after an identifier has been used it cannot be reused anymore even if the * resource having that identifier has been removed. * Values associated to identifiers tell whether the resource still exist. * * @see WorldFileResource#getIdentifier() */ final Map<String,Boolean> identifiers; /** * The user-specified method for customizing the band definitions. Never {@code null}. */ final CoverageModifier customizer; /** * Creates a new store from the given file, URL or stream. * * @param format information about the storage (URL, stream, <i>etc</i>) and the reader/writer to use. * @throws DataStoreException if an error occurred while opening the stream. * @throws IOException if an error occurred while creating the image reader instance. */ WorldFileStore(final FormatFinder format) throws DataStoreException, IOException { super(format.provider, format.connector); listeners.useReadOnlyEvents(); identifiers = new HashMap<>(); customizer = CoverageModifier.getOrDefault(format.connector); suffix = format.suffix; if (format.storage instanceof Closeable) { toClose = (Closeable) format.storage; } if (!format.openAsWriter) { reader = format.getOrCreateReader(); if (reader == null) { throw new UnsupportedStorageException(super.getLocale(), WorldFileStoreProvider.NAME, format.storage, format.connector.getOption(OptionKey.OPEN_OPTIONS)); } configureReader(); /* * Do not invoke any method that may cause the image reader to start reading the stream, * because the `WritableStore` subclass will want to save the initial stream position. */ } } /** * Sets the locale to use for warning messages, if supported. If the reader * does not support the locale, the reader's default locale will be used. 
     */
    private void configureReader() {
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final ImageReader reader = this.reader;
        try {
            reader.setLocale(listeners.getLocale());
        } catch (IllegalArgumentException e) {
            // Ignore: the reader does not support the requested locale, so its default locale is used.
        }
        reader.addIIOReadWarningListener(new WarningListener(listeners));
    }

    /**
     * Returns the preferred suffix for the auxiliary world file. For TIFF images, this is {@code "tfw"}.
     * This method tries to use the same case (lower-case or upper-case) than the suffix of the main file.
     */
    private String getWorldFileSuffix() {
        if (suffix != null) {
            final int length = suffix.length();
            if (suffix.codePointCount(0, length) >= 2) {
                /*
                 * Scan the suffix from its end toward its beginning, looking for the last character
                 * having a case. The trailing 'w' or 'W' of the world file suffix will use that case.
                 */
                boolean lower = true;
                for (int i = length; i > 0;) {
                    final int c = suffix.codePointBefore(i);
                    lower = Character.isLowerCase(c);
                    if ( lower) break;                      // Last cased character is lower-case.
                    lower = !Character.isUpperCase(c);
                    if (!lower) break;                      // Last cased character is upper-case.
                    i -= Character.charCount(c);            // Caseless character (e.g. a digit): skip it.
                }
                // If the case cannot be determined, `lower` will default to `true`.
                return new StringBuilder(3)
                        .appendCodePoint(suffix.codePointAt(0))
                        .appendCodePoint(suffix.codePointBefore(length))
                        .append(lower ? 'w' : 'W').toString();
            }
        }
        return DEFAULT_SUFFIX;
    }

    /**
     * Reads the "World file" by searching for an auxiliary file with a suffix inferred from
     * the suffix of the main file. This method tries suffixes with the following conventions,
     * in preference order.
     *
     * <ol>
     *   <li>First letter of main file suffix, followed by last letter, followed by {@code 'w'}.</li>
     *   <li>Full suffix of the main file followed by {@code 'w'}.</li>
     *   <li>{@value #DEFAULT_SUFFIX}.</li>
     * </ol>
     *
     * @return the "World file" content as an affine transform, or {@code null} if none was found.
     * @throws URISyntaxException if an error occurred while normalizing the URI.
     * @throws IOException if an I/O error occurred.
     * @throws DataStoreException if the auxiliary file content cannot be parsed.
*/ private AffineTransform2D readWorldFile() throws URISyntaxException, IOException, DataStoreException { IOException warning = null; final String preferred = getWorldFileSuffix(); loop: for (int convention=0;; convention++) { final String wld; switch (convention) { default: break loop; case 0: wld = preferred; break; // First file suffix to search. case 2: wld = DEFAULT_SUFFIX; break; // File suffix to search in last resort. case 1: { if (preferred.equals(DEFAULT_SUFFIX)) break loop; wld = suffix + preferred.charAt(preferred.length() - 1); break; } } try { return readWorldFile(wld); } catch (NoSuchFileException | FileNotFoundException e) { if (warning == null) { warning = e; } else { warning.addSuppressed(e); } } } if (warning != null) { cannotReadAuxiliaryFile(WorldFileStore.class, "getGridGeometry", preferred, warning, true); } return null; } /** * Reads the "World file" by parsing an auxiliary file with the given suffix. * * @param wld suffix of the auxiliary file. * @return the "World file" content as an affine transform, or {@code null} if none was found. * @throws URISyntaxException if an error occurred while normalizing the URI. * @throws IOException if an I/O error occurred. * @throws DataStoreException if the file content cannot be parsed. */ private AffineTransform2D readWorldFile(final String wld) throws URISyntaxException, IOException, DataStoreException { final AuxiliaryContent content = readAuxiliaryFile(wld, false); if (content == null) { cannotReadAuxiliaryFile(WorldFileStore.class, "getGridGeometry", wld, null, true); return null; } final String filename = content.getFilename(); final CharSequence[] lines = CharSequences.splitOnEOL(content); final int expected = 6; // Expected number of elements. int count = 0; // Actual number of elements. 
final double[] elements = new double[expected]; for (int i=0; i<expected; i++) { final String line = lines[i].toString().trim(); if (!line.isEmpty() && line.charAt(0) != '#') { if (count >= expected) { throw new DataStoreContentException(errors().getString(Errors.Keys.TooManyOccurrences_2, expected, "coefficient")); } try { elements[count++] = Double.parseDouble(line); } catch (NumberFormatException e) { throw new DataStoreContentException(errors().getString(Errors.Keys.ErrorInFileAtLine_2, filename, i), e); } } } if (count != expected) { throw new EOFException(errors().getString(Errors.Keys.UnexpectedEndOfFile_1, filename)); } if (filename != null) { final int s = filename.lastIndexOf(IOUtilities.EXTENSION_SEPARATOR); if (s >= 0) { suffixWLD = filename.substring(s+1); } } return new AffineTransform2D(elements); } /** * Returns the localized resources for producing error messages. */ private Errors errors() { return Errors.forLocale(getLocale()); } /** * Returns the Image I/O format names or MIME types of the image read by this data store. * More than one names may be returned if the format has aliases or if the MIME type * has legacy types (e.g. official {@code "image/png"} and legacy {@code "image/x-png"}). * * @param asMimeType {@code true} for MIME types, or {@code false} for format names. * @return the requested names, or an empty array if none or unknown. */ public String[] getImageFormat(final boolean asMimeType) { @SuppressWarnings("LocalVariableHidesMemberVariable") final ImageReader reader = this.reader; if (reader != null) { final ImageReaderSpi p = reader.getOriginatingProvider(); if (p != null) { final String[] names = asMimeType ? p.getMIMETypes() : p.getFormatNames(); if (names != null) { return names; } } } return CharSequences.EMPTY_ARRAY; } /** * Returns paths to the main file together with auxiliary files. * * @return paths to the main file and auxiliary files, or an empty value if unknown. 
     * @throws DataStoreException if the URI cannot be converted to a {@link Path}.
     */
    @Override
    public synchronized Optional<FileSet> getFileSet() throws DataStoreException {
        if (suffixWLD == null) try {
            getGridGeometry(MAIN_IMAGE);        // Will compute `suffixWLD` as a side effect.
        } catch (URISyntaxException | IOException e) {
            throw new DataStoreException(e);
        }
        // NOTE(review): `suffixWLD` may still be null here if no world file exists; presumably
        // `listComponentFiles` ignores null suffixes — confirm against PRJDataStore.
        return listComponentFiles(suffixWLD, PRJ);      // `suffixWLD` still null if file was not found.
    }

    /**
     * Returns the source to report in a call to a {@link #customizer} method.
     *
     * @param  index  image index.
     * @return the source to declare.
     */
    final CoverageModifier.Source source(final int index) {
        return new CoverageModifier.Source(this, index, null);
    }

    /**
     * Gets the grid geometry for image at the given index.
     * This method should be invoked only once per image, and the result cached.
     *
     * @param  index  index of the image for which to read the grid geometry.
     * @return grid geometry of the image at the given index.
     * @throws IndexOutOfBoundsException if the image index is out of bounds.
     * @throws URISyntaxException if an error occurred while normalizing the URI.
     * @throws IOException if an I/O error occurred.
     * @throws DataStoreException if the {@code *.prj} or {@code *.tfw} auxiliary file content cannot be parsed.
*/ final GridGeometry getGridGeometry(final int index) throws URISyntaxException, IOException, DataStoreException { assert Thread.holdsLock(this); @SuppressWarnings("LocalVariableHidesMemberVariable") final ImageReader reader = reader(); if (gridGeometry == null) { width = reader.getWidth (MAIN_IMAGE); height = reader.getHeight(MAIN_IMAGE); final var extent = new GridExtent(width, height); final AffineTransform2D gridToCRS = readWorldFile(); readPRJ(WorldFileStore.class, "getGridGeometry"); gridGeometry = customizer.customize(source(index), new GridGeometry(extent, CELL_ANCHOR, gridToCRS, crs)); } if (index != MAIN_IMAGE) { final int w = reader.getWidth (index); final int h = reader.getHeight(index); if (w != width || h != height) { // Cannot use `gridToCRS` and `crs` because they may not apply. return new GridGeometry(new GridExtent(w, h), CELL_ANCHOR, null, null); } } return gridGeometry; } /** * Sets the store-wide grid geometry when a new coverage is written. The {@link WritableStore} implementation * is responsible for making sure that the new grid geometry is compatible with preexisting grid geometry. * * @param index index of the image for which to set the grid geometry. * @param gg the new grid geometry. * @return suffix of the "world file", or {@code null} if the image cannot be written. */ String setGridGeometry(final int index, final GridGeometry gg) throws URISyntaxException, IOException, DataStoreException { if (index != MAIN_IMAGE) { return null; } final GridExtent extent = gg.getExtent(); final int w = Math.toIntExact(extent.getSize(WorldFileResource.X_DIMENSION)); final int h = Math.toIntExact(extent.getSize(WorldFileResource.Y_DIMENSION)); final String s = (suffixWLD != null) ? suffixWLD : getWorldFileSuffix(); crs = gg.isDefined(GridGeometry.CRS) ? gg.getCoordinateReferenceSystem() : null; gridGeometry = gg; // Set only after success of all the above. 
width = w; height = h; suffixWLD = s; return s; } /** * Returns information about the data store as a whole. */ @Override public synchronized Metadata getMetadata() throws DataStoreException { if (metadata == null) try { final var builder = new MetadataBuilder(); String format = reader().getFormatName(); for (final String key : KNOWN_FORMATS) { if (key.equalsIgnoreCase(format)) { if (builder.setPredefinedFormat(key, listeners, false)) { format = null; } break; } } builder.addFormatName(format); // Does nothing if `format` is null. builder.addFormatReaderSIS(WorldFileStoreProvider.NAME); builder.addResourceScope(ScopeCode.valueOf("COVERAGE"), null); builder.addSpatialRepresentation(null, getGridGeometry(MAIN_IMAGE), true); if (gridGeometry.isDefined(GridGeometry.ENVELOPE)) { builder.addExtent(gridGeometry.getEnvelope(), listeners); } mergeAuxiliaryMetadata(WorldFileStore.class, builder); builder.addTitleOrIdentifier(getFilename(), MetadataBuilder.Scope.ALL); builder.setISOStandards(false); metadata = customizer.customize(new CoverageModifier.Source(this), builder.build()); } catch (URISyntaxException | IOException e) { throw new DataStoreException(e); } return metadata; } /** * Returns all images in this store. Note that fetching the size of the list is a potentially costly operation. * * @return list of images in this store. * @throws DataStoreException if an error occurred while fetching components. */ @SuppressWarnings("ReturnOfCollectionOrArrayField") public synchronized Collection<? extends GridCoverageResource> components() throws DataStoreException { if (components == null) try { components = new Components(reader().getNumImages(false)); } catch (IOException e) { throw new DataStoreException(e); } return components; } /** * Returns all images in this store, or {@code null} if none and {@code create} is false. * * @param create whether to create the component list if it was not already created. 
* @param numImages number of images, or any negative value if unknown. */ @SuppressWarnings("ReturnOfCollectionOrArrayField") final Components components(final boolean create, final int numImages) { if (components == null && create) { components = new Components(numImages); } return components; } /** * A list of images where each {@link WorldFileResource} instance is initialized when first needed. * Fetching the list size may be a costly operation and will be done only if requested. */ final class Components extends ListOfUnknownSize<WorldFileResource> { /** * Size of this list, or any negative value if unknown. */ private int size; /** * All elements in this list. Some array elements may be {@code null} if the image * has never been requested. */ private WorldFileResource[] images; /** * Creates a new list of images. * * @param numImages number of images, or any negative value if unknown. */ private Components(final int numImages) { size = numImages; images = new WorldFileResource[Math.max(numImages, 1)]; } /** * Returns the number of images in this list. * This method may be costly when invoked for the first time. */ @Override public int size() { synchronized (WorldFileStore.this) { if (size < 0) try { size = reader().getNumImages(true); images = ArraysExt.resize(images, size); } catch (IOException e) { throw new UncheckedIOException(e); } catch (DataStoreException e) { throw new BackingStoreException(e); } return size; } } /** * Returns the number of images if this information is known, or any negative value otherwise. * This is used by {@link ListOfUnknownSize} for optimizing some operations. */ @Override protected int sizeIfKnown() { synchronized (WorldFileStore.this) { return size; } } /** * Returns {@code true} if an element exists at the given index. * Current implementations is not more efficient than {@link #get(int)}. 
*/ @Override protected boolean exists(final int index) { synchronized (WorldFileStore.this) { if (size >= 0) { return index >= 0 && index < size; } try { return get(index) != null; } catch (IndexOutOfBoundsException e) { return false; } } } /** * Returns the image at the given index. New instances are created when first requested. * * @param index index of the image for which to get a resource. * @return resource for the image identified by the given index. * @throws IndexOutOfBoundsException if the image index is out of bounds. */ @Override public WorldFileResource get(final int index) { synchronized (WorldFileStore.this) { WorldFileResource image = null; if (index < images.length) { image = images[index]; } if (image == null) try { image = createImageResource(index); if (index >= images.length) { images = Arrays.copyOf(images, Math.max(images.length * 2, index + 1)); } images[index] = image; } catch (IOException e) { throw new UncheckedIOException(e); } catch (URISyntaxException | DataStoreException e) { throw new BackingStoreException(e); } return image; } } /** * Invoked <em>after</em> an image has been added to the image file. * This method adds in this list a reference to the newly added file. * * @param image the image to add to this list. */ final void added(final WorldFileResource image) { size = image.getImageIndex(); if (size >= images.length) { images = Arrays.copyOf(images, size * 2); } images[size++] = image; } /** * Invoked <em>after</em> an image has been removed from the image file. * This method performs no bounds check (it must be done by the caller). * * @param index index of the image that has been removed. 
         */
        final void removed(int index) throws DataStoreException {
            // Shift all subsequent references one slot to the left, then clear the duplicated last slot.
            final int last = images.length - 1;
            System.arraycopy(images, index+1, images, index, last - index);
            images[last] = null;
            size--;
            // Images after the removed one now have a new position in the file: update their indices.
            while (index < last) {
                final WorldFileResource image = images[index++];
                if (image != null) image.decrementImageIndex();
            }
        }

        /**
         * Removes the element at the specified position in this list.
         */
        @Override
        public WorldFileResource remove(final int index) {
            final WorldFileResource image = get(index);
            try {
                WorldFileStore.this.remove(image);
            } catch (DataStoreException e) {
                throw new UnsupportedOperationException(e);
            }
            return image;
        }
    }

    /**
     * Invoked by {@link Components} when the caller wants to remove a resource.
     * The actual implementation is provided by {@link WritableStore};
     * this read-only base class always refuses the removal.
     */
    void remove(final Resource resource) throws DataStoreException {
        throw new ReadOnlyStorageException();
    }

    /**
     * Creates a {@link GridCoverageResource} for the specified image.
     * This method is invoked by {@link Components} when first needed
     * and the result is cached by the caller.
     *
     * @param  index  index of the image for which to create a resource.
     * @return resource for the image identified by the given index.
     * @throws IndexOutOfBoundsException if the image index is out of bounds.
     * @throws URISyntaxException if an error occurred while normalizing the URI.
     * @throws IOException if an I/O error occurred while reading the grid geometry.
     * @throws DataStoreException if the auxiliary file content cannot be parsed.
     */
    WorldFileResource createImageResource(final int index) throws DataStoreException, URISyntaxException, IOException {
        return new WorldFileResource(this, listeners, index, getGridGeometry(index));
    }

    /**
     * Whether the component of this data store is used only as a delegate.
     * This is {@code false} when the components will be given to the user,
     * or {@code true} if the singleton component will be used only for internal purposes.
     */
    boolean isComponentHidden() {
        return false;
    }

    /**
     * Prepares an image reader compatible with the writer and sets its input.
     * This method is invoked for switching from write mode to read mode.
* Its actual implementation is provided by {@link WritableResource}. * * @param current the current image reader, or {@code null} if none. * @return the image reader to use, or {@code null} if none. * @throws IOException if an error occurred while preparing the reader. */ ImageReader prepareReader(ImageReader current) throws IOException { return null; } /** * Returns the reader without doing any validation. The reader may be {@code null} either * because the store is closed or because the store is initially opened in write-only mode. * The reader may have a {@code null} input. */ final ImageReader getCurrentReader() { return reader; } /** * Returns the reader if it has not been closed. * * @throws DataStoreClosedException if this data store is closed. * @throws IOException if an error occurred while preparing the reader. */ final ImageReader reader() throws DataStoreException, IOException { assert Thread.holdsLock(this); ImageReader current = reader; if (current == null || current.getInput() == null) { reader = current = prepareReader(current); if (current == null) { throw new DataStoreClosedException(getLocale(), WorldFileStoreProvider.NAME, StandardOpenOption.READ); } configureReader(); } return current; } /** * Closes this data store and releases any underlying resources. * If a read operation is in progress, it will be aborted. * * @throws DataStoreException if an error occurred while closing this data store. */ @Override public void close() throws DataStoreException { listeners.close(); // Should never fail. 
final ImageReader codec = reader; if (codec != null) codec.abort(); synchronized (this) { final Closeable stream = toClose; reader = null; toClose = null; metadata = null; components = null; gridGeometry = null; try { Object input = null; if (codec != null) { input = codec.getInput(); codec.reset(); codec.dispose(); if (input instanceof AutoCloseable) { ((AutoCloseable) input).close(); } } if (stream != null && stream != input) { stream.close(); } } catch (Exception e) { throw new DataStoreException(e); } } } }
googleapis/google-cloud-java
35,499
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/SubscriberEvent.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/subscriber_event.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Represents information which resellers will get as part of notification from * Pub/Sub. * </pre> * * Protobuf type {@code google.cloud.channel.v1.SubscriberEvent} */ public final class SubscriberEvent extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.SubscriberEvent) SubscriberEventOrBuilder { private static final long serialVersionUID = 0L; // Use SubscriberEvent.newBuilder() to construct. 
private SubscriberEvent(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SubscriberEvent() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SubscriberEvent(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.SubscriberEventProto .internal_static_google_cloud_channel_v1_SubscriberEvent_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.SubscriberEventProto .internal_static_google_cloud_channel_v1_SubscriberEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.SubscriberEvent.class, com.google.cloud.channel.v1.SubscriberEvent.Builder.class); } private int eventCase_ = 0; @SuppressWarnings("serial") private java.lang.Object event_; public enum EventCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CUSTOMER_EVENT(1), ENTITLEMENT_EVENT(2), EVENT_NOT_SET(0); private final int value; private EventCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static EventCase valueOf(int value) { return forNumber(value); } public static EventCase forNumber(int value) { switch (value) { case 1: return CUSTOMER_EVENT; case 2: return ENTITLEMENT_EVENT; case 0: return EVENT_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public EventCase getEventCase() { return EventCase.forNumber(eventCase_); } public static final int CUSTOMER_EVENT_FIELD_NUMBER = 1; /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> * * @return Whether the customerEvent field is set. */ @java.lang.Override public boolean hasCustomerEvent() { return eventCase_ == 1; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> * * @return The customerEvent. */ @java.lang.Override public com.google.cloud.channel.v1.CustomerEvent getCustomerEvent() { if (eventCase_ == 1) { return (com.google.cloud.channel.v1.CustomerEvent) event_; } return com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.CustomerEventOrBuilder getCustomerEventOrBuilder() { if (eventCase_ == 1) { return (com.google.cloud.channel.v1.CustomerEvent) event_; } return com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } public static final int ENTITLEMENT_EVENT_FIELD_NUMBER = 2; /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> * * @return Whether the entitlementEvent field is set. */ @java.lang.Override public boolean hasEntitlementEvent() { return eventCase_ == 2; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> * * @return The entitlementEvent. 
*/ @java.lang.Override public com.google.cloud.channel.v1.EntitlementEvent getEntitlementEvent() { if (eventCase_ == 2) { return (com.google.cloud.channel.v1.EntitlementEvent) event_; } return com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ @java.lang.Override public com.google.cloud.channel.v1.EntitlementEventOrBuilder getEntitlementEventOrBuilder() { if (eventCase_ == 2) { return (com.google.cloud.channel.v1.EntitlementEvent) event_; } return com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (eventCase_ == 1) { output.writeMessage(1, (com.google.cloud.channel.v1.CustomerEvent) event_); } if (eventCase_ == 2) { output.writeMessage(2, (com.google.cloud.channel.v1.EntitlementEvent) event_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (eventCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.channel.v1.CustomerEvent) event_); } if (eventCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.channel.v1.EntitlementEvent) event_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.channel.v1.SubscriberEvent)) { return super.equals(obj); } com.google.cloud.channel.v1.SubscriberEvent other = (com.google.cloud.channel.v1.SubscriberEvent) obj; if (!getEventCase().equals(other.getEventCase())) return false; switch (eventCase_) { case 1: if (!getCustomerEvent().equals(other.getCustomerEvent())) return false; break; case 2: if (!getEntitlementEvent().equals(other.getEntitlementEvent())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (eventCase_) { case 1: hash = (37 * hash) + CUSTOMER_EVENT_FIELD_NUMBER; hash = (53 * hash) + getCustomerEvent().hashCode(); break; case 2: hash = (37 * hash) + ENTITLEMENT_EVENT_FIELD_NUMBER; hash = (53 * hash) + getEntitlementEvent().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.SubscriberEvent parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.SubscriberEvent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.SubscriberEvent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, 
input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.SubscriberEvent prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents information which resellers will get as part of notification from * Pub/Sub. * </pre> * * Protobuf type {@code google.cloud.channel.v1.SubscriberEvent} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.SubscriberEvent) com.google.cloud.channel.v1.SubscriberEventOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.SubscriberEventProto .internal_static_google_cloud_channel_v1_SubscriberEvent_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.SubscriberEventProto .internal_static_google_cloud_channel_v1_SubscriberEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.SubscriberEvent.class, com.google.cloud.channel.v1.SubscriberEvent.Builder.class); } // Construct using com.google.cloud.channel.v1.SubscriberEvent.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (customerEventBuilder_ != null) { 
customerEventBuilder_.clear(); } if (entitlementEventBuilder_ != null) { entitlementEventBuilder_.clear(); } eventCase_ = 0; event_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.SubscriberEventProto .internal_static_google_cloud_channel_v1_SubscriberEvent_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.SubscriberEvent getDefaultInstanceForType() { return com.google.cloud.channel.v1.SubscriberEvent.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.SubscriberEvent build() { com.google.cloud.channel.v1.SubscriberEvent result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.SubscriberEvent buildPartial() { com.google.cloud.channel.v1.SubscriberEvent result = new com.google.cloud.channel.v1.SubscriberEvent(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.channel.v1.SubscriberEvent result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.cloud.channel.v1.SubscriberEvent result) { result.eventCase_ = eventCase_; result.event_ = this.event_; if (eventCase_ == 1 && customerEventBuilder_ != null) { result.event_ = customerEventBuilder_.build(); } if (eventCase_ == 2 && entitlementEventBuilder_ != null) { result.event_ = entitlementEventBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.SubscriberEvent) { return mergeFrom((com.google.cloud.channel.v1.SubscriberEvent) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.SubscriberEvent other) { if (other == com.google.cloud.channel.v1.SubscriberEvent.getDefaultInstance()) return this; switch (other.getEventCase()) { case CUSTOMER_EVENT: { mergeCustomerEvent(other.getCustomerEvent()); break; } case ENTITLEMENT_EVENT: { mergeEntitlementEvent(other.getEntitlementEvent()); break; } case EVENT_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCustomerEventFieldBuilder().getBuilder(), extensionRegistry); eventCase_ = 1; break; } // case 10 case 18: { input.readMessage( getEntitlementEventFieldBuilder().getBuilder(), extensionRegistry); eventCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, 
extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int eventCase_ = 0; private java.lang.Object event_; public EventCase getEventCase() { return EventCase.forNumber(eventCase_); } public Builder clearEvent() { eventCase_ = 0; event_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.CustomerEvent, com.google.cloud.channel.v1.CustomerEvent.Builder, com.google.cloud.channel.v1.CustomerEventOrBuilder> customerEventBuilder_; /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> * * @return Whether the customerEvent field is set. */ @java.lang.Override public boolean hasCustomerEvent() { return eventCase_ == 1; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> * * @return The customerEvent. */ @java.lang.Override public com.google.cloud.channel.v1.CustomerEvent getCustomerEvent() { if (customerEventBuilder_ == null) { if (eventCase_ == 1) { return (com.google.cloud.channel.v1.CustomerEvent) event_; } return com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } else { if (eventCase_ == 1) { return customerEventBuilder_.getMessage(); } return com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ public Builder setCustomerEvent(com.google.cloud.channel.v1.CustomerEvent value) { if (customerEventBuilder_ == null) { if (value == null) { throw new NullPointerException(); } event_ = value; onChanged(); } else { customerEventBuilder_.setMessage(value); } eventCase_ = 1; return this; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ public Builder setCustomerEvent( com.google.cloud.channel.v1.CustomerEvent.Builder builderForValue) { if (customerEventBuilder_ == null) { event_ = builderForValue.build(); onChanged(); } else { customerEventBuilder_.setMessage(builderForValue.build()); } eventCase_ = 1; return this; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ public Builder mergeCustomerEvent(com.google.cloud.channel.v1.CustomerEvent value) { if (customerEventBuilder_ == null) { if (eventCase_ == 1 && event_ != com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance()) { event_ = com.google.cloud.channel.v1.CustomerEvent.newBuilder( (com.google.cloud.channel.v1.CustomerEvent) event_) .mergeFrom(value) .buildPartial(); } else { event_ = value; } onChanged(); } else { if (eventCase_ == 1) { customerEventBuilder_.mergeFrom(value); } else { customerEventBuilder_.setMessage(value); } } eventCase_ = 1; return this; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ public Builder clearCustomerEvent() { if (customerEventBuilder_ == null) { if (eventCase_ == 1) { eventCase_ = 0; event_ = null; onChanged(); } } else { if (eventCase_ == 1) { eventCase_ = 0; event_ = null; } customerEventBuilder_.clear(); } return this; } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ public com.google.cloud.channel.v1.CustomerEvent.Builder getCustomerEventBuilder() { return getCustomerEventFieldBuilder().getBuilder(); } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.CustomerEventOrBuilder getCustomerEventOrBuilder() { if ((eventCase_ == 1) && (customerEventBuilder_ != null)) { return customerEventBuilder_.getMessageOrBuilder(); } else { if (eventCase_ == 1) { return (com.google.cloud.channel.v1.CustomerEvent) event_; } return com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } } /** * * * <pre> * Customer event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.CustomerEvent customer_event = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.CustomerEvent, com.google.cloud.channel.v1.CustomerEvent.Builder, com.google.cloud.channel.v1.CustomerEventOrBuilder> getCustomerEventFieldBuilder() { if (customerEventBuilder_ == null) { if (!(eventCase_ == 1)) { event_ = com.google.cloud.channel.v1.CustomerEvent.getDefaultInstance(); } customerEventBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.CustomerEvent, com.google.cloud.channel.v1.CustomerEvent.Builder, com.google.cloud.channel.v1.CustomerEventOrBuilder>( (com.google.cloud.channel.v1.CustomerEvent) event_, getParentForChildren(), isClean()); event_ = null; } eventCase_ = 1; onChanged(); return customerEventBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.EntitlementEvent, com.google.cloud.channel.v1.EntitlementEvent.Builder, com.google.cloud.channel.v1.EntitlementEventOrBuilder> entitlementEventBuilder_; /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> * * @return Whether the entitlementEvent field is set. */ @java.lang.Override public boolean hasEntitlementEvent() { return eventCase_ == 2; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> * * @return The entitlementEvent. 
*/ @java.lang.Override public com.google.cloud.channel.v1.EntitlementEvent getEntitlementEvent() { if (entitlementEventBuilder_ == null) { if (eventCase_ == 2) { return (com.google.cloud.channel.v1.EntitlementEvent) event_; } return com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } else { if (eventCase_ == 2) { return entitlementEventBuilder_.getMessage(); } return com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ public Builder setEntitlementEvent(com.google.cloud.channel.v1.EntitlementEvent value) { if (entitlementEventBuilder_ == null) { if (value == null) { throw new NullPointerException(); } event_ = value; onChanged(); } else { entitlementEventBuilder_.setMessage(value); } eventCase_ = 2; return this; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ public Builder setEntitlementEvent( com.google.cloud.channel.v1.EntitlementEvent.Builder builderForValue) { if (entitlementEventBuilder_ == null) { event_ = builderForValue.build(); onChanged(); } else { entitlementEventBuilder_.setMessage(builderForValue.build()); } eventCase_ = 2; return this; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ public Builder mergeEntitlementEvent(com.google.cloud.channel.v1.EntitlementEvent value) { if (entitlementEventBuilder_ == null) { if (eventCase_ == 2 && event_ != com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance()) { event_ = com.google.cloud.channel.v1.EntitlementEvent.newBuilder( (com.google.cloud.channel.v1.EntitlementEvent) event_) .mergeFrom(value) .buildPartial(); } else { event_ = value; } onChanged(); } else { if (eventCase_ == 2) { entitlementEventBuilder_.mergeFrom(value); } else { entitlementEventBuilder_.setMessage(value); } } eventCase_ = 2; return this; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ public Builder clearEntitlementEvent() { if (entitlementEventBuilder_ == null) { if (eventCase_ == 2) { eventCase_ = 0; event_ = null; onChanged(); } } else { if (eventCase_ == 2) { eventCase_ = 0; event_ = null; } entitlementEventBuilder_.clear(); } return this; } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ public com.google.cloud.channel.v1.EntitlementEvent.Builder getEntitlementEventBuilder() { return getEntitlementEventFieldBuilder().getBuilder(); } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. 
* </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ @java.lang.Override public com.google.cloud.channel.v1.EntitlementEventOrBuilder getEntitlementEventOrBuilder() { if ((eventCase_ == 2) && (entitlementEventBuilder_ != null)) { return entitlementEventBuilder_.getMessageOrBuilder(); } else { if (eventCase_ == 2) { return (com.google.cloud.channel.v1.EntitlementEvent) event_; } return com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } } /** * * * <pre> * Entitlement event sent as part of Pub/Sub event to partners. * </pre> * * <code>.google.cloud.channel.v1.EntitlementEvent entitlement_event = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.EntitlementEvent, com.google.cloud.channel.v1.EntitlementEvent.Builder, com.google.cloud.channel.v1.EntitlementEventOrBuilder> getEntitlementEventFieldBuilder() { if (entitlementEventBuilder_ == null) { if (!(eventCase_ == 2)) { event_ = com.google.cloud.channel.v1.EntitlementEvent.getDefaultInstance(); } entitlementEventBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.EntitlementEvent, com.google.cloud.channel.v1.EntitlementEvent.Builder, com.google.cloud.channel.v1.EntitlementEventOrBuilder>( (com.google.cloud.channel.v1.EntitlementEvent) event_, getParentForChildren(), isClean()); event_ = null; } eventCase_ = 2; onChanged(); return entitlementEventBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.SubscriberEvent) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.SubscriberEvent) private static final 
com.google.cloud.channel.v1.SubscriberEvent DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.SubscriberEvent(); } public static com.google.cloud.channel.v1.SubscriberEvent getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SubscriberEvent> PARSER = new com.google.protobuf.AbstractParser<SubscriberEvent>() { @java.lang.Override public SubscriberEvent parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SubscriberEvent> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SubscriberEvent> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.SubscriberEvent getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/stanbol
35,635
ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/GraphMultiplexer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.stanbol.ontologymanager.multiplexer.clerezza.collector;

import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.APPENDED_TO_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.DEPENDS_ON_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.ENTRY_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_APPENDED_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_DEPENDENT_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_ONTOLOGY_IRI_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_SPACE_CORE_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_SPACE_CUSTOM_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.HAS_VERSION_IRI_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.IS_MANAGED_BY_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.IS_SPACE_CORE_OF_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.IS_SPACE_CUSTOM_OF_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.MANAGES_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.SCOPE_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.SESSION_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.SIZE_IN_TRIPLES_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary.SPACE_URIREF;
import static org.apache.stanbol.ontologymanager.servicesapi.Vocabulary._NS_STANBOL_INTERNAL;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.core.LiteralFactory;
import org.apache.clerezza.rdf.ontologies.OWL;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.stanbol.ontologymanager.core.scope.ScopeManagerImpl;
import org.apache.stanbol.ontologymanager.multiplexer.clerezza.session.SessionManagerImpl;
import org.apache.stanbol.ontologymanager.servicesapi.collector.OntologyCollector;
import org.apache.stanbol.ontologymanager.servicesapi.ontology.Multiplexer;
import org.apache.stanbol.ontologymanager.servicesapi.scope.OntologySpace;
import org.apache.stanbol.ontologymanager.servicesapi.scope.OntologySpace.SpaceType;
import org.apache.stanbol.ontologymanager.servicesapi.scope.Scope;
import org.apache.stanbol.ontologymanager.servicesapi.scope.ScopeManager;
import org.apache.stanbol.ontologymanager.servicesapi.session.Session;
import org.apache.stanbol.ontologymanager.servicesapi.session.SessionEvent;
import org.apache.stanbol.ontologymanager.servicesapi.session.SessionManager;
import org.apache.stanbol.ontologymanager.servicesapi.util.OntologyUtils;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link Multiplexer} that keeps track of ontology collectors, public keys, aliases and
 * dependency relationships by reading and writing a Clerezza meta-level {@link Graph}.
 *
 * TODO make this object update its sizes as a graph changes.
 *
 * @author alexdma
 *
 */
public class GraphMultiplexer implements Multiplexer {

    /**
     * Thrown when the meta graph contains triples that do not form a consistent
     * description of the known scopes, spaces, sessions and ontology entries.
     */
    private class InvalidMetaGraphStateException extends RuntimeException {

        private static final long serialVersionUID = 3915817349833358738L;

        @SuppressWarnings("unused")
        InvalidMetaGraphStateException() {
            super();
        }

        InvalidMetaGraphStateException(String message) {
            super(message);
        }

    }

    private Logger log = LoggerFactory.getLogger(getClass());

    /** The meta-level graph this multiplexer reads and updates. */
    private Graph meta;

    public GraphMultiplexer(Graph metaGraph) {
        this.meta = metaGraph;
    }

    /**
     * Creates an {@link OWLOntologyID} object by combining the ontologyIRI and the versionIRI, where
     * applicable, of the stored graph.
     *
     * @param resource
     *            the ontology
     * @return the public key reconstructed from the meta graph, or decoded from the resource itself
     *         if no ontology IRI assertion is found.
     */
    protected OWLOntologyID buildPublicKey(final IRI resource) {
        // TODO desanitize?
        org.semanticweb.owlapi.model.IRI oiri = null, viri = null;
        Iterator<Triple> it = meta.filter(resource, HAS_ONTOLOGY_IRI_URIREF, null);
        if (it.hasNext()) {
            RDFTerm obj = it.next().getObject();
            if (obj instanceof IRI) oiri = org.semanticweb.owlapi.model.IRI.create(((IRI) obj)
                    .getUnicodeString());
            else if (obj instanceof Literal) oiri = org.semanticweb.owlapi.model.IRI.create(((Literal) obj)
                    .getLexicalForm());
        } else {
            // Anonymous ontology? Decode the resource itself (which is not null)
            return OntologyUtils.decode(resource.getUnicodeString());
        }
        it = meta.filter(resource, HAS_VERSION_IRI_URIREF, null);
        if (it.hasNext()) {
            RDFTerm obj = it.next().getObject();
            if (obj instanceof IRI) viri = org.semanticweb.owlapi.model.IRI.create(((IRI) obj)
                    .getUnicodeString());
            else if (obj instanceof Literal) viri = org.semanticweb.owlapi.model.IRI.create(((Literal) obj)
                    .getLexicalForm());
        }
        if (viri == null) return new OWLOntologyID(oiri);
        else return new OWLOntologyID(oiri, viri);
    }

    /**
     * Creates an {@link IRI} out of an {@link OWLOntologyID}, so it can be used as an identifier. This
     * does NOT necessarily correspond to the IRI that identifies the stored graph. In order to obtain
     * that, check the objects of any MAPS_TO_GRAPH assertions.
     *
     * @param publicKey
     *            the non-null, non-anonymous public key.
     * @return the matching resource already present in the meta graph, or a newly encoded IRI if none.
     */
    protected IRI buildResource(final OWLOntologyID publicKey) {
        if (publicKey == null) throw new IllegalArgumentException(
                "Cannot build a IRI resource on a null public key!");
        // The IRI is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
        // XXX should versionIRI also include the version IRI set by owners? Currently not
        // Remember not to sanitize logical identifiers.
        org.semanticweb.owlapi.model.IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey
                .getVersionIRI();
        if (ontologyIri == null) throw new IllegalArgumentException(
                "Cannot build a IRI resource on an anonymous public key!");
        log.debug("Searching for a meta graph entry for public key:");
        log.debug(" -- {}", publicKey);
        IRI match = null;
        LiteralFactory lf = LiteralFactory.getInstance();
        Literal oiri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
        Literal viri = versionIri == null ? null : lf.createTypedLiteral(new IRI(versionIri
                .toString()));
        for (Iterator<Triple> it = meta.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext();) {
            RDFTerm subj = it.next().getSubject();
            log.debug(" -- Ontology IRI match found. Scanning");
            log.debug(" -- RDFTerm : {}", subj);
            if (!(subj instanceof IRI)) {
                log.debug(" ---- (uncomparable: skipping...)");
                continue;
            }
            if (viri != null) {
                // Must find matching versionIRI
                if (meta.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI_URIREF, viri))) {
                    log.debug(" ---- Version IRI match!");
                    match = (IRI) subj;
                    break; // Found
                } else {
                    log.debug(" ---- Expected version IRI match not found.");
                    continue; // There could be another with the right versionIRI.
                }
            } else {
                // Must find unversioned resource
                if (meta.filter((IRI) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
                    log.debug(" ---- Unexpected version IRI found. Skipping.");
                    continue;
                } else {
                    log.debug(" ---- Unversioned match!");
                    match = (IRI) subj;
                    break; // Found
                }
            }
        }
        log.debug("Matching IRI in graph : {}", match);
        if (match == null) return new IRI(OntologyUtils.encode(publicKey));
        else return match;
    }

    /**
     * If the candidate resource is typed as an ontology space or session in the meta graph, resolves
     * it to the corresponding live {@link OntologyCollector} and adds it to the handle set.
     *
     * @param candidate
     *            the resource to be resolved.
     * @param handles
     *            the accumulator the resolved collector is added to.
     */
    private void checkHandle(IRI candidate, Set<OntologyCollector> handles) {
        /*
         * We have to do it like this because we cannot make this class a Component and reference ONManager
         * and SessionManager, otherwise an activation cycle will occur.
         */
        ScopeManager scopeManager = ScopeManagerImpl.get(); // FIXME get rid of this.
        SessionManager sessionManager = SessionManagerImpl.get();
        String prefix_scope = _NS_STANBOL_INTERNAL + Scope.shortName + "/", prefix_session = _NS_STANBOL_INTERNAL
                                                                                             + Session.shortName
                                                                                             + "/";

        // TODO check when not explicitly typed.
        SpaceType spaceType;
        if (meta.contains(new TripleImpl(candidate, RDF.type, SPACE_URIREF))) {
            // Determine the parent scope by scanning both direct and inverse space-linking predicates.
            RDFTerm rScope;
            Iterator<Triple> parentSeeker = meta.filter(candidate, IS_SPACE_CORE_OF_URIREF, null);
            if (parentSeeker.hasNext()) {
                rScope = parentSeeker.next().getObject();
                spaceType = SpaceType.CORE;
            } else {
                parentSeeker = meta.filter(candidate, IS_SPACE_CUSTOM_OF_URIREF, null);
                if (parentSeeker.hasNext()) {
                    rScope = parentSeeker.next().getObject();
                    spaceType = SpaceType.CUSTOM;
                } else {
                    parentSeeker = meta.filter(null, HAS_SPACE_CORE_URIREF, candidate);
                    if (parentSeeker.hasNext()) {
                        rScope = parentSeeker.next().getSubject();
                        spaceType = SpaceType.CORE;
                    } else {
                        parentSeeker = meta.filter(null, HAS_SPACE_CUSTOM_URIREF, candidate);
                        if (parentSeeker.hasNext()) {
                            rScope = parentSeeker.next().getSubject();
                            spaceType = SpaceType.CUSTOM;
                        } else throw new InvalidMetaGraphStateException("Ontology space " + candidate
                                                                        + " does not declare a parent scope.");
                    }
                }
            }
            if (!(rScope instanceof IRI)) throw new InvalidMetaGraphStateException(
                    rScope + " is not a legal scope identifier.");
            String scopeId = ((IRI) rScope).getUnicodeString().substring(prefix_scope.length());
            Scope scope = scopeManager.getScope(scopeId);
            if (scope == null) {
                // Stale registration: the meta graph references a scope that is no longer live.
                log.warn("Scope \"{}\" is registered in the meta graph but not available. Skipping.", scopeId);
                return;
            }
            switch (spaceType) {
                case CORE:
                    handles.add(scope.getCoreSpace());
                    break;
                case CUSTOM:
                    handles.add(scope.getCustomSpace());
                    break;
            }
        } else if (meta.contains(new TripleImpl(candidate, RDF.type, SESSION_URIREF))) {
            String sessionId = candidate.getUnicodeString().substring(prefix_session.length());
            handles.add(sessionManager.getSession(sessionId));
        }
    }

    @Override
    public void clearDependencies(OWLOntologyID dependent) {
        if (dependent == null) throw new IllegalArgumentException("dependent cannot be null");
        log.debug("Clearing dependencies for {}", dependent);
        Set<Triple> dependencies = new HashSet<Triple>();
        synchronized (meta) {
            // Dependencies may be asserted against any alias of the dependent.
            Set<OWLOntologyID> aliases = listAliases(dependent);
            aliases.add(dependent);
            for (OWLOntologyID depalias : aliases) {
                IRI dep = buildResource(depalias);
                Iterator<Triple> it = meta.filter(dep, DEPENDS_ON_URIREF, null);
                while (it.hasNext()) {
                    Triple t = it.next();
                    dependencies.add(t);
                    log.debug(" ... Set {} as a dependency to remove.", t.getObject());
                }
                it = meta.filter(null, HAS_DEPENDENT_URIREF, dep);
                while (it.hasNext()) {
                    Triple t = it.next();
                    dependencies.add(t);
                    log.debug(" ... Set {} as a dependency to remove.", t.getSubject());
                }
            }
            meta.removeAll(dependencies);
        }
        log.debug(" ... DONE clearing dependencies.");
    }

    @Override
    public Set<OWLOntologyID> getDependencies(OWLOntologyID dependent) {
        Set<OWLOntologyID> dependencies = new HashSet<OWLOntologyID>();
        log.debug("Getting dependencies for {}", dependent);
        synchronized (meta) {
            Set<OWLOntologyID> aliases = listAliases(dependent);
            aliases.add(dependent);
            for (OWLOntologyID depalias : aliases) {
                IRI dep = buildResource(depalias);
                // Direct relation: dep dependsOn X
                Iterator<Triple> it = meta.filter(dep, DEPENDS_ON_URIREF, null);
                while (it.hasNext()) {
                    RDFTerm obj = it.next().getObject();
                    log.debug(" ... found {} (direct).", obj);
                    if (obj instanceof IRI) dependencies.add(buildPublicKey((IRI) obj));
                    else log.warn(" ... Unexpected literal value!");
                }
                // Inverse relation: X hasDependent dep
                it = meta.filter(null, HAS_DEPENDENT_URIREF, dep);
                while (it.hasNext()) {
                    RDFTerm sub = it.next().getSubject();
                    log.debug(" ... found {} (inverse).", sub);
                    if (sub instanceof IRI) dependencies.add(buildPublicKey((IRI) sub));
                    else log.warn(" ... Unexpected literal value!");
                }
            }
        }
        return dependencies;
    }

    @Override
    public Set<OWLOntologyID> getDependents(OWLOntologyID dependency) {
        Set<OWLOntologyID> dependents = new HashSet<OWLOntologyID>();
        IRI dep = buildResource(dependency);
        log.debug("Getting dependents for {}", dependency);
        synchronized (meta) {
            // Inverse relation: X dependsOn dep
            Iterator<Triple> it = meta.filter(null, DEPENDS_ON_URIREF, dep);
            while (it.hasNext()) {
                RDFTerm sub = it.next().getSubject();
                log.debug(" ... found {} (inverse).", sub);
                if (sub instanceof IRI) dependents.add(buildPublicKey((IRI) sub));
                else log.warn(" ... Unexpected literal value!");
            }
            // Direct relation: dep hasDependent X
            it = meta.filter(dep, HAS_DEPENDENT_URIREF, null);
            while (it.hasNext()) {
                RDFTerm obj = it.next().getObject();
                log.debug(" ... found {} (direct).", obj);
                if (obj instanceof IRI) dependents.add(buildPublicKey((IRI) obj));
                else log.warn(" ... Unexpected literal value!");
            }
        }
        return dependents;
    }

    @Override
    public Set<OntologyCollector> getHandles(OWLOntologyID publicKey) {
        Set<OntologyCollector> handles = new HashSet<OntologyCollector>();
        Set<OWLOntologyID> aliases = listAliases(publicKey);
        aliases.add(publicKey);
        for (OWLOntologyID alias : aliases) {
            IRI ontologyId = buildResource(alias);
            // Scan both the direct (collector manages ontology) ...
            for (Iterator<Triple> it = meta.filter(null, MANAGES_URIREF, ontologyId); it.hasNext();) {
                BlankNodeOrIRI sub = it.next().getSubject();
                if (sub instanceof IRI) checkHandle((IRI) sub, handles);
                else throw new InvalidMetaGraphStateException(
                        sub + " is not a valid ontology collector identifer.");
            }
            // ... and the inverse (ontology isManagedBy collector) assertions.
            for (Iterator<Triple> it = meta.filter(ontologyId, IS_MANAGED_BY_URIREF, null); it.hasNext();) {
                RDFTerm obj = it.next().getObject();
                if (obj instanceof IRI) checkHandle((IRI) obj, handles);
                else throw new InvalidMetaGraphStateException(
                        obj + " is not a valid ontology collector identifer.");
            }
        }
        return handles;
    }

    private IRI getIRIforScope(String scopeId) {
        // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
        return new IRI(_NS_STANBOL_INTERNAL + Scope.shortName + "/" + scopeId);
    }

    private IRI getIRIforSession(Session session) {
        // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
        return new IRI(_NS_STANBOL_INTERNAL + Session.shortName + "/" + session.getID());
    }

    private IRI getIRIforSpace(OntologySpace space) {
        // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
        return new IRI(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/" + space.getID());
    }

    @Override
    public OWLOntologyID getPublicKey(String stringForm) {
        if (stringForm == null || stringForm.trim().isEmpty()) throw new IllegalArgumentException(
                "String form must not be null or empty.");
        return buildPublicKey(new IRI(stringForm));
    }

    @Override
    public Set<OWLOntologyID> getPublicKeys() {
        Set<OWLOntologyID> result = new HashSet<OWLOntologyID>();
        Iterator<Triple> it = meta.filter(null, RDF.type, ENTRY_URIREF);
        while (it.hasNext()) {
            RDFTerm sub = it.next().getSubject();
            if (sub instanceof IRI) result.add(buildPublicKey((IRI) sub));
        }
        return result;
    }

    @Override
    public int getSize(OWLOntologyID publicKey) {
        IRI subj = buildResource(publicKey);
        Iterator<Triple> it = meta.filter(subj, SIZE_IN_TRIPLES_URIREF, null);
        if (it.hasNext()) {
            RDFTerm obj = it.next().getObject();
            if (obj instanceof Literal) {
                String s = ((Literal) obj).getLexicalForm();
                try {
                    return Integer.parseInt(s);
                } catch (NumberFormatException ex) {
                    log.warn("Not a valid integer value {} for size of {}", s, publicKey);
                    return -1;
                }
            }
        }
        // No (literal) size assertion found: report zero rather than failing.
        return 0;
    }

    /*
     * XXX see if we can use reasoners, either live or by caching materialisations.
     */
    /**
     * Collects every public key related to the given one via {@code owl:sameAs}, in either direction.
     *
     * @param publicKey
     *            a non-null, non-anonymous public key.
     * @return the (possibly empty) set of aliases, not including the key itself.
     */
    protected Set<OWLOntologyID> listAliases(OWLOntologyID publicKey) {
        if (publicKey == null || publicKey.isAnonymous()) throw new IllegalArgumentException(
                "Cannot locate aliases for null or anonymous public keys.");
        Set<OWLOntologyID> aliases = new HashSet<OWLOntologyID>();
        IRI ont = buildResource(publicKey);
        // Forwards
        for (Iterator<Triple> it = meta.filter(ont, OWL.sameAs, null); it.hasNext();) {
            RDFTerm r = it.next().getObject();
            if (r instanceof IRI) aliases.add(buildPublicKey((IRI) r));
        }
        // Backwards
        for (Iterator<Triple> it = meta.filter(null, OWL.sameAs, ont); it.hasNext();) {
            RDFTerm r = it.next().getSubject();
            if (r instanceof IRI) aliases.add(buildPublicKey((IRI) r));
        }
        return aliases;
    }

    @Override
    public void onOntologyAdded(OntologyCollector collector, OWLOntologyID addedOntology) {
        // When the ontology provider hears an ontology has been added to a collector, it has to register this
        // into the metadata graph.

        // log.info("Heard addition of ontology {} to collector {}", addedOntology, collector.getID());
        // log.info("This ontology is stored as {}", getKey(addedOntology));

        String colltype = "";
        if (collector instanceof Scope) colltype = Scope.shortName + "/"; // Cannot be
        else if (collector instanceof OntologySpace) colltype = OntologySpace.shortName + "/";
        else if (collector instanceof Session) colltype = Session.shortName + "/";
        IRI c = new IRI(_NS_STANBOL_INTERNAL + colltype + collector.getID());
        IRI u =
        // new IRI(prefix + "::" + keymap.buildResource(addedOntology).getUnicodeString());
        // keymap.getMapping(addedOntology);
        buildResource(addedOntology);

        // TODO OntologyProvider should not be aware of scopes, spaces or sessions. Move elsewhere.
        boolean hasValues = false;
        log.debug("Ontology {}", addedOntology);
        log.debug("-- is already managed by the following collectors :");
        for (Iterator<Triple> it = meta.filter(u, IS_MANAGED_BY_URIREF, null); it.hasNext();) {
            hasValues = true;
            log.debug("-- {}", it.next().getObject());
        }
        for (Iterator<Triple> it = meta.filter(null, MANAGES_URIREF, u); it.hasNext();) {
            hasValues = true;
            log.debug("-- {} (inverse)", it.next().getSubject());
        }
        if (!hasValues) log.debug("-- <none>");

        // Add both inverse triples. This graph has to be traversed efficiently, no need for reasoners.
        IRI predicate1 = null, predicate2 = null;
        if (collector instanceof OntologySpace) {
            predicate1 = MANAGES_URIREF;
            predicate2 = IS_MANAGED_BY_URIREF;
        } else if (collector instanceof Session) {
            // TODO implement model for sessions.
            predicate1 = MANAGES_URIREF;
            predicate2 = IS_MANAGED_BY_URIREF;
        } else {
            log.error("Unrecognized ontology collector type {} for \"{}\". Aborting.",
                collector.getClass(), collector.getID());
            return;
        }
        if (u != null) synchronized (meta) {
            Triple t;
            if (predicate1 != null) {
                t = new TripleImpl(c, predicate1, u);
                boolean b = meta.add(t);
                log.debug((b ? "Successful" : "Redundant") + " addition of meta triple");
                log.debug("-- {} ", t);
            }
            if (predicate2 != null) {
                t = new TripleImpl(u, predicate2, c);
                boolean b = meta.add(t);
                log.debug((b ? "Successful" : "Redundant") + " addition of meta triple");
                log.debug("-- {} ", t);
            }
        }
    }

    @Override
    public void onOntologyRemoved(OntologyCollector collector, OWLOntologyID removedOntology) {
        log.info("Heard removal of ontology {} from collector {}", removedOntology, collector.getID());

        String colltype = "";
        if (collector instanceof Scope) colltype = Scope.shortName + "/"; // Cannot be
        else if (collector instanceof OntologySpace) colltype = OntologySpace.shortName + "/";
        else if (collector instanceof Session) colltype = Session.shortName + "/";
        IRI c = new IRI(_NS_STANBOL_INTERNAL + colltype + collector.getID());
        Set<OWLOntologyID> aliases = listAliases(removedOntology);
        aliases.add(removedOntology);
        boolean badState = true;
        for (OWLOntologyID alias : aliases) {
            IRI u = buildResource(alias);

            // XXX condense the following code

            // Verify that at least one management assertion linked collector and ontology.
            log.debug("Checking ({},{}) pattern", c, u);
            for (Iterator<Triple> it = meta.filter(c, null, u); it.hasNext();) {
                IRI property = it.next().getPredicate();
                if (collector instanceof OntologySpace || collector instanceof Session) {
                    if (property.equals(MANAGES_URIREF)) badState = false;
                }
            }

            log.debug("Checking ({},{}) pattern", u, c);
            for (Iterator<Triple> it = meta.filter(u, null, c); it.hasNext();) {
                IRI property = it.next().getPredicate();
                if (collector instanceof OntologySpace || collector instanceof Session) {
                    if (property.equals(IS_MANAGED_BY_URIREF)) badState = false;
                }
            }

            synchronized (meta) {
                if (collector instanceof OntologySpace || collector instanceof Session) {
                    meta.remove(new TripleImpl(c, MANAGES_URIREF, u));
                    meta.remove(new TripleImpl(u, IS_MANAGED_BY_URIREF, c));
                }
            }
        }
        if (badState) throw new InvalidMetaGraphStateException(
                "No relationship found between ontology collector " + c + " and stored ontology "
                        + removedOntology + " (or its aliases).");
    }

    @Override
    public void removeDependency(OWLOntologyID dependent, OWLOntologyID dependency) {
        if (dependent == null) throw new IllegalArgumentException("dependent cannot be null");
        if (dependency == null) throw new IllegalArgumentException("dependency cannot be null");
        log.debug("Removing dependency.");
        log.debug(" ... dependent : {}", dependent);
        log.debug(" ... dependency : {}", dependency);
        IRI depy = buildResource(dependency);
        synchronized (meta) {
            Set<OWLOntologyID> aliases = listAliases(dependent);
            aliases.add(dependent);
            for (OWLOntologyID depalias : aliases) {
                IRI dep = buildResource(depalias);
                Triple t = new TripleImpl(dep, DEPENDS_ON_URIREF, depy);
                boolean found = false;
                if (meta.contains(t)) {
                    found = true;
                    meta.remove(t);
                }
                t = new TripleImpl(depy, HAS_DEPENDENT_URIREF, dep);
                if (meta.contains(t)) {
                    found = true;
                    meta.remove(t);
                }
                if (!found) log.warn("No such dependency found.");
                else log.debug("DONE removing dependency.");
            }
        }
    }

    @Override
    public void scopeActivated(Scope scope) {}

    @Override
    public void scopeAppended(Session session, String scopeId) {
        final IRI sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
        if (sessionur == null || scopeur == null) throw new IllegalArgumentException(
                "IRIs for scope and session cannot be null.");
        if (meta instanceof Graph) synchronized (meta) {
            meta.add(new TripleImpl(sessionur, HAS_APPENDED_URIREF, scopeur));
            meta.add(new TripleImpl(scopeur, APPENDED_TO_URIREF, sessionur));
        }
    }

    @Override
    public void scopeCreated(Scope scope) {}

    @Override
    public void scopeDeactivated(Scope scope) {}

    @Override
    public void scopeDetached(Session session, String scopeId) {
        final IRI sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
        if (sessionur == null || scopeur == null) throw new IllegalArgumentException(
                "IRIs for scope and session cannot be null.");
        if (meta instanceof Graph) synchronized (meta) {
            // TripleImpl implements equals() and hashCode() ...
            meta.remove(new TripleImpl(sessionur, HAS_APPENDED_URIREF, scopeur));
            meta.remove(new TripleImpl(scopeur, APPENDED_TO_URIREF, sessionur));
        }
    }

    @Override
    public void scopeRegistered(Scope scope) {
        updateScopeRegistration(scope);
    }

    @Override
    public void scopeUnregistered(Scope scope) {
        updateScopeUnregistration(scope);
    }

    @Override
    public void sessionChanged(SessionEvent event) {
        switch (event.getOperationType()) {
            case CREATE:
                updateSessionRegistration(event.getSession());
                break;
            case KILL:
                updateSessionUnregistration(event.getSession());
                break;
            default:
                break;
        }
    }

    @Override
    public void setDependency(OWLOntologyID dependent, OWLOntologyID dependency) {
        if (dependent == null) throw new IllegalArgumentException("dependent cannot be null");
        if (dependency == null) throw new IllegalArgumentException("dependency cannot be null");
        log.debug("Setting dependency.");
        log.debug(" ... dependent : {}", dependent);
        log.debug(" ... dependency : {}", dependency);
        IRI dep = buildResource(dependent), depy = buildResource(dependency);
        // TODO check for the actual resource!
        synchronized (meta) {
            meta.add(new TripleImpl(dep, DEPENDS_ON_URIREF, depy));
        }
        log.debug("DONE setting dependency.");
    }

    /**
     * Write registration info for a new ontology scope and its spaces.
     *
     * @param scope
     *            the scope whose information needs to be updated.
     */
    private void updateScopeRegistration(Scope scope) {
        final IRI scopeur = getIRIforScope(scope.getID());
        final IRI coreur = getIRIforSpace(scope.getCoreSpace());
        final IRI custur = getIRIforSpace(scope.getCustomSpace());
        // If this method was called after a scope rebuild, the following will have little to no effect.
        synchronized (meta) {
            // Spaces are created along with the scope, so it is safe to add their triples.
            meta.add(new TripleImpl(scopeur, RDF.type, SCOPE_URIREF));
            meta.add(new TripleImpl(coreur, RDF.type, SPACE_URIREF));
            meta.add(new TripleImpl(custur, RDF.type, SPACE_URIREF));
            meta.add(new TripleImpl(scopeur, HAS_SPACE_CORE_URIREF, coreur));
            meta.add(new TripleImpl(scopeur, HAS_SPACE_CUSTOM_URIREF, custur));
            // Add inverse predicates so we can traverse the graph in both directions.
            meta.add(new TripleImpl(coreur, IS_SPACE_CORE_OF_URIREF, scopeur));
            meta.add(new TripleImpl(custur, IS_SPACE_CUSTOM_OF_URIREF, scopeur));
        }
        log.debug("Ontology collector information triples added for scope \"{}\".", scope);
    }

    /**
     * Remove all information on a deregistered ontology scope and its spaces.
     *
     * @param scope
     *            the scope whose information needs to be updated.
     */
    private void updateScopeUnregistration(Scope scope) {
        long before = System.currentTimeMillis();
        boolean removable = false, conflict = false;
        final IRI scopeur = getIRIforScope(scope.getID());
        final IRI coreur = getIRIforSpace(scope.getCoreSpace());
        final IRI custur = getIRIforSpace(scope.getCustomSpace());
        Set<Triple> removeUs = new HashSet<Triple>();
        // First pass: sanity-check that the resource is (only) typed as a scope.
        for (Iterator<Triple> it = meta.filter(scopeur, null, null); it.hasNext();) {
            Triple t = it.next();
            if (RDF.type.equals(t.getPredicate())) {
                if (SCOPE_URIREF.equals(t.getObject())) removable = true;
                else conflict = true;
            }
            removeUs.add(t);
        }
        if (!removable) {
            log.error("Cannot write scope deregistration to persistence:");
            log.error("-- resource {}", scopeur);
            log.error("-- is not typed as a {} in the meta-graph.", SCOPE_URIREF);
        } else if (conflict) {
            log.error("Conflict upon scope deregistration:");
            log.error("-- resource {}", scopeur);
            log.error("-- has incompatible types in the meta-graph.");
        } else {
            // Second pass: gather every triple mentioning the scope or either of its spaces.
            // NOTE(review): unlike updateScopeRegistration(), this is not synchronized on meta — confirm.
            log.debug("Removing all triples for scope \"{}\".", scope.getID());
            Iterator<Triple> it;
            for (it = meta.filter(null, null, scopeur); it.hasNext();)
                removeUs.add(it.next());
            for (it = meta.filter(null, null, coreur); it.hasNext();)
                removeUs.add(it.next());
            for (it = meta.filter(coreur, null, null); it.hasNext();)
                removeUs.add(it.next());
            for (it = meta.filter(null, null, custur); it.hasNext();)
                removeUs.add(it.next());
            for (it = meta.filter(custur, null, null); it.hasNext();)
                removeUs.add(it.next());
            meta.removeAll(removeUs);
            log.debug("Done; removed {} triples in {} ms.", removeUs.size(),
                System.currentTimeMillis() - before);
        }
    }

    /** Write registration info (typing only) for a newly created session. */
    private void updateSessionRegistration(Session session) {
        final IRI sesur = getIRIforSession(session);
        // If this method was called after a session rebuild, the following will have little to no effect.
        synchronized (meta) {
            // The only essential triple to add is typing
            meta.add(new TripleImpl(sesur, RDF.type, SESSION_URIREF));
        }
        log.debug("Ontology collector information triples added for session \"{}\".", sesur);
    }

    /** Remove all information on a killed session. */
    private void updateSessionUnregistration(Session session) {
        long before = System.currentTimeMillis();
        boolean removable = false, conflict = false;
        final IRI sessionur = getIRIforSession(session);
        Set<Triple> removeUs = new HashSet<Triple>();
        // Sanity-check typing before wiping, mirroring updateScopeUnregistration().
        for (Iterator<Triple> it = meta.filter(sessionur, null, null); it.hasNext();) {
            Triple t = it.next();
            if (RDF.type.equals(t.getPredicate())) {
                if (SESSION_URIREF.equals(t.getObject())) removable = true;
                else conflict = true;
            }
            removeUs.add(t);
        }
        if (!removable) {
            log.error("Cannot write session deregistration to persistence:");
            log.error("-- resource {}", sessionur);
            log.error("-- is not typed as a {} in the meta-graph.", SESSION_URIREF);
        } else if (conflict) {
            log.error("Conflict upon session deregistration:");
            log.error("-- resource {}", sessionur);
            log.error("-- has incompatible types in the meta-graph.");
        } else {
            log.debug("Removing all triples for session \"{}\".", session.getID());
            Iterator<Triple> it;
            for (it = meta.filter(null, null, sessionur); it.hasNext();)
                removeUs.add(it.next());
            for (it = meta.filter(sessionur, null, null); it.hasNext();)
                removeUs.add(it.next());
            meta.removeAll(removeUs);
            log.debug("Done; removed {} triples in {} ms.", removeUs.size(),
                System.currentTimeMillis() - before);
        }
    }

}
apache/xmlgraphics-batik
35,542
batik-gvt/src/main/java/org/apache/batik/gvt/CompositeGraphicsNode.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.gvt; import java.awt.Graphics2D; import java.awt.Rectangle; import java.awt.Shape; import java.awt.geom.AffineTransform; import java.awt.geom.GeneralPath; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.util.Collection; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.NoSuchElementException; import org.apache.batik.util.HaltingThread; /** * A CompositeGraphicsNode is a graphics node that can contain graphics nodes. * * <br>Note: this class is a 'little bit aware of' other threads, but not really threadsafe. * * @author <a href="mailto:Thierry.Kormann@sophia.inria.fr">Thierry Kormann</a> * @version $Id$ */ public class CompositeGraphicsNode extends AbstractGraphicsNode implements List { public static final Rectangle2D VIEWPORT = new Rectangle(); public static final Rectangle2D NULL_RECT = new Rectangle(); /** * The children of this composite graphics node. */ protected GraphicsNode [] children; /** * The number of children of this composite graphics node. */ protected volatile int count; /** * The number of times the children list has been structurally modified. 
*/ protected volatile int modCount; /** * This flag indicates if this node has BackgroundEnable = 'new'. * If so traversal of the gvt tree can halt here. */ protected Rectangle2D backgroundEnableRgn = null; /** * Internal Cache: Geometry bounds for this node, not taking into * account any of its children rendering attributes into account */ private volatile Rectangle2D geometryBounds; /** * Internal Cache: Primitive bounds. */ private volatile Rectangle2D primitiveBounds; /** * Internal Cache: Sensitive bounds. */ private volatile Rectangle2D sensitiveBounds; /** * Internal Cache: the outline. */ private Shape outline; /** * Constructs a new empty <code>CompositeGraphicsNode</code>. */ public CompositeGraphicsNode() {} // // Structural methods // /** * Returns the list of children. */ public List getChildren() { return this; } /** * Sets the enable background property to the specified rectangle. * * @param bgRgn the region that defines the background enable property */ public void setBackgroundEnable(Rectangle2D bgRgn) { backgroundEnableRgn = bgRgn; } /** * Returns the region defining the background enable property. */ public Rectangle2D getBackgroundEnable() { return backgroundEnableRgn; } /** * Sets if this node is visible or not depending on the specified value. * Don't fire a graphicsNodeChange event because this doesn't really * effect us (it effects our children through CSS inheritence). * * @param isVisible If true this node is visible */ public void setVisible(boolean isVisible) { // fireGraphicsNodeChangeStarted(); this.isVisible = isVisible; // fireGraphicsNodeChangeCompleted(); } // // Drawing methods // /** * Paints this node without applying Filter, Mask, Composite, and clip. 
* * @param g2d the Graphics2D to use */ public void primitivePaint(Graphics2D g2d) { if (count == 0) { return; } // Thread.currentThread() is potentially expensive, so reuse my instance in hasBeenHalted() Thread currentThread = Thread.currentThread(); // Paint children for (int i=0; i < count; ++i) { if (HaltingThread.hasBeenHalted( currentThread )) return; GraphicsNode node = children[i]; if (node == null) { continue; } node.paint(g2d); } } // // Event support methods // // // Geometric methods // /** * Invalidates the cached geometric bounds. This method is called * each time an attribute that affects the bounds of this node * changed. */ protected void invalidateGeometryCache() { super.invalidateGeometryCache(); geometryBounds = null; primitiveBounds = null; sensitiveBounds = null; outline = null; } /** * Returns the bounds of the area covered by this node's primitive paint. */ public Rectangle2D getPrimitiveBounds() { if (primitiveBounds != null) { if (primitiveBounds == NULL_RECT) return null; return primitiveBounds; } // Thread.currentThread() is potentially expensive, so reuse my instance in hasBeenHalted() Thread currentThread = Thread.currentThread(); int i=0; Rectangle2D bounds = null; while ((bounds == null) && i < count) { bounds = children[i++].getTransformedBounds(IDENTITY); if (((i & 0x0F) == 0) && HaltingThread.hasBeenHalted( currentThread )) break; // check every 16 children if we have been interrupted. 
} if (HaltingThread.hasBeenHalted( currentThread )) { invalidateGeometryCache(); return null; } if (bounds == null) { primitiveBounds = NULL_RECT; return null; } primitiveBounds = bounds; while (i < count) { Rectangle2D ctb = children[i++].getTransformedBounds(IDENTITY); if (ctb != null) { if (primitiveBounds == null) { // another thread has set the primitive bounds to null, // need to recall this function return null; } else { primitiveBounds.add(ctb); } } if (((i & 0x0F) == 0) && HaltingThread.hasBeenHalted( currentThread )) break; // check every 16 children if we have been interrupted. } // Check If we should halt early. if (HaltingThread.hasBeenHalted( currentThread )) { // The Thread has been halted. // Invalidate any cached values and proceed. invalidateGeometryCache(); } return primitiveBounds; } /** * Transforms a Rectangle 2D by an affine transform. It assumes the transform * is only scale/translate so there is no loss of precision over transforming * the source geometry. */ public static Rectangle2D getTransformedBBox(Rectangle2D r2d, AffineTransform t) { if ((t == null) || (r2d == null)) return r2d; double x = r2d.getX(); double w = r2d.getWidth(); double y = r2d.getY(); double h = r2d.getHeight(); double sx = t.getScaleX(); double sy = t.getScaleY(); if (sx < 0) { x = -(x + w); sx = -sx; } if (sy < 0) { y = -(y + h); sy = -sy; } return new Rectangle2D.Float ((float)(x*sx+t.getTranslateX()), (float)(y*sy+t.getTranslateY()), (float)(w*sx), (float)(h*sy)); } /** * Returns the bounds of this node's primitivePaint after applying * the input transform (if any), concatenated with this node's * transform (if any). * * @param txf the affine transform with which this node's transform should * be concatenated. Should not be null. 
*/ public Rectangle2D getTransformedPrimitiveBounds(AffineTransform txf) { AffineTransform t = txf; if (transform != null) { t = new AffineTransform(txf); t.concatenate(transform); } if ((t == null) || ((t.getShearX() == 0) && (t.getShearY() == 0))) { // No rotation it's safe to simply transform our bounding box. return getTransformedBBox(getPrimitiveBounds(), t); } int i = 0; Rectangle2D tpb = null; while (tpb == null && i < count) { tpb = children[i++].getTransformedBounds(t); } while (i < count) { Rectangle2D ctb = children[i++].getTransformedBounds(t); if(ctb != null){ tpb.add(ctb); } } return tpb; } /** * Returns the bounds of the area covered by this node, without * taking any of its rendering attributes into account. That is, * exclusive of any clipping, masking, filtering or stroking, for * example. */ public Rectangle2D getGeometryBounds() { if (geometryBounds == null) { // System.err.println("geometryBounds are null"); int i=0; while(geometryBounds == null && i < count){ geometryBounds = children[i++].getTransformedGeometryBounds (IDENTITY); } while (i<count) { Rectangle2D cgb = children[i++].getTransformedGeometryBounds(IDENTITY); if (cgb != null) { if (geometryBounds == null) { // another thread has set the geometry bounds to null, // need to recall this function return getGeometryBounds(); } else { geometryBounds.add(cgb); } } } } return geometryBounds; } /** * Returns the bounds of the area covered by this node, without taking any * of its rendering attribute into accoun. That is, exclusive of any clipping, * masking, filtering or stroking, for example. The returned value is * transformed by the concatenation of the input transform and this node's * transform. * * @param txf the affine transform with which this node's transform should * be concatenated. Should not be null. 
 */
    // Bounds of this subtree under (txf o this.transform). When the combined
    // transform has no shear (no rotation), the cached geometry bounds can be
    // scaled/translated directly; otherwise every child must be re-queried.
    public Rectangle2D getTransformedGeometryBounds(AffineTransform txf) {
        AffineTransform t = txf;
        if (transform != null) {
            t = new AffineTransform(txf);
            t.concatenate(transform);
        }

        if ((t == null) || ((t.getShearX() == 0) && (t.getShearY() == 0))) {
            // No rotation it's safe to simply transform our bounding box.
            return getTransformedBBox(getGeometryBounds(), t);
        }

        // First loop: find the first child reporting non-null bounds...
        Rectangle2D gb = null;
        int i = 0;
        while (gb == null && i < count) {
            gb = children[i++].getTransformedGeometryBounds(t);
        }

        Rectangle2D cgb = null;
        // ...second loop: union the remaining children into it. 'gb' is
        // non-null whenever this loop body runs (otherwise i == count).
        while (i < count) {
            cgb = children[i++].getTransformedGeometryBounds(t);
            if (cgb != null) {
                gb.add(cgb);
            }
        }
        return gb;
    }

    /**
     * Returns the bounds of the sensitive area covered by this node,
     * This includes the stroked area but does not include the effects
     * of clipping, masking or filtering.
     */
    public Rectangle2D getSensitiveBounds() {
        if (sensitiveBounds != null)
            return sensitiveBounds;

        // System.out.println("sensitiveBoundsBounds are null");
        // Cache miss: fold every child's sensitive bounds (expressed in this
        // node's coordinate system via IDENTITY) into 'sensitiveBounds'.
        // NOTE(review): the field doubles as the loop accumulator, so another
        // thread invalidating the cache mid-computation is detected by the
        // null re-check below — statement order matters here.
        int i = 0;
        while (sensitiveBounds == null && i < count) {
            sensitiveBounds =
                children[i++].getTransformedSensitiveBounds(IDENTITY);
        }

        while (i < count) {
            Rectangle2D cgb =
                children[i++].getTransformedSensitiveBounds(IDENTITY);
            if (cgb != null) {
                if (sensitiveBounds == null)
                    // another thread has set the geometry bounds to null,
                    // need to recall this function
                    return getSensitiveBounds();
                sensitiveBounds.add(cgb);
            }
        }
        return sensitiveBounds;
    }

    /**
     * Returns the bounds of the sensitive area covered by this node,
     * This includes the stroked area but does not include the effects
     * of clipping, masking or filtering. The returned value is
     * transformed by the concatenation of the input transform and
     * this node's transform.
     *
     * @param txf the affine transform with which this node's
     *        transform should be concatenated. Should not be null.
*/ public Rectangle2D getTransformedSensitiveBounds(AffineTransform txf) { AffineTransform t = txf; if (transform != null) { t = new AffineTransform(txf); t.concatenate(transform); } if ((t == null) || ((t.getShearX() == 0) && (t.getShearY() == 0))) { // No rotation it's safe to simply transform our bounding box. return getTransformedBBox(getSensitiveBounds(), t); } Rectangle2D sb = null; int i=0; while (sb == null && i < count) { sb = children[i++].getTransformedSensitiveBounds(t); } while (i < count) { Rectangle2D csb = children[i++].getTransformedSensitiveBounds(t); if (csb != null) { sb.add(csb); } } return sb; } /** * Returns true if the specified Point2D is inside the boundary of this * node, false otherwise. * * @param p the specified Point2D in the user space */ public boolean contains(Point2D p) { Rectangle2D bounds = getSensitiveBounds(); if (count > 0 && bounds != null && bounds.contains(p)) { Point2D pt = null; Point2D cp = null; // Propagated to children for (int i=0; i < count; ++i) { AffineTransform t = children[i].getInverseTransform(); if(t != null){ pt = t.transform(p, pt); cp = pt; } else { cp = p; } if (children[i].contains(cp)) { return true; } } } return false; } /** * Returns the GraphicsNode containing point p if this node or one of its * children is sensitive to mouse events at p. * * @param p the specified Point2D in the user space */ public GraphicsNode nodeHitAt(Point2D p) { Rectangle2D bounds = getSensitiveBounds(); if (count > 0 && bounds != null && bounds.contains(p)) { // Go backward because the children are in rendering order Point2D pt = null; Point2D cp = null; // Propagated to children for (int i=count-1; i >= 0; --i) { AffineTransform t = children[i].getInverseTransform(); if(t != null){ pt = t.transform(p, pt); cp = pt; } else { cp = p; } GraphicsNode node = children[i].nodeHitAt(cp); if (node != null) { return node; } } } return null; } /** * Returns the outline of this node. 
*/ public Shape getOutline() { if (outline != null) return outline; outline = new GeneralPath(); for (int i = 0; i < count; i++) { Shape childOutline = children[i].getOutline(); if (childOutline != null) { AffineTransform tr = children[i].getTransform(); if (tr != null) { ((GeneralPath)outline).append(tr.createTransformedShape(childOutline), false); } else { ((GeneralPath)outline).append(childOutline, false); } } } return outline; } // // Structural info // /** * Sets the root node of this grahics node and modify all its children. */ protected void setRoot(RootGraphicsNode newRoot) { super.setRoot(newRoot); for (int i=0; i < count; ++i) { GraphicsNode node = children[i]; ((AbstractGraphicsNode)node).setRoot(newRoot); } } // // List implementation // /** * Returns the number of children of this composite graphics node. */ public int size() { return count; } /** * Returns true if this composite graphics node does not contain * graphics node, false otherwise. */ public boolean isEmpty() { return (count == 0); } /** * Returns true if this composite graphics node contains the * specified graphics node, false otherwise. * @param node the node to check */ public boolean contains(Object node) { return (indexOf(node) >= 0); } /** * Returns an iterator over the children of this graphics node. */ public Iterator iterator() { return new Itr(); } /** * Returns an array containing all of the graphics node in the children list * of this composite graphics node in the correct order. */ public Object [] toArray() { GraphicsNode [] result = new GraphicsNode[count]; System.arraycopy( children, 0, result, 0, count ); return result; } /** * Returns an array containing all of the graphics node in the * children list of this composite graphics node in the correct * order. If the children list fits in the specified array, it is * returned therein. Otherwise, a new array is allocated. 
* * @param a the array to fit if possible */ public Object[] toArray(Object [] a) { if (a.length < count) { a = new GraphicsNode[count]; } System.arraycopy(children, 0, a, 0, count); if (a.length > count) { a[count] = null; } return a; } /** * Returns the graphics node at the specified position in the children list. * * @param index the index of the graphics node to return * @exception IndexOutOfBoundsException if the index is out of range */ public Object get(int index) { checkRange(index); return children[index]; } // Modification Operations /** * Replaces the graphics node at the specified position in the children list * with the specified graphics node. * * @param index the index of the graphics node to replace * @param o the graphics node to be stored at the specified position * @return the graphics node previously at the specified position * @exception IndexOutOfBoundsException if the index is out of range * @exception IllegalArgumentException if the node is not an * instance of GraphicsNode */ public Object set(int index, Object o) { // Check for correct arguments if (!(o instanceof GraphicsNode)) { throw new IllegalArgumentException(o+" is not a GraphicsNode"); } checkRange(index); GraphicsNode node = (GraphicsNode) o; { fireGraphicsNodeChangeStarted(node); } // Reparent the graphics node and tidy up the tree's state if (node.getParent() != null) { node.getParent().getChildren().remove(node); } // Replace the node to the children list GraphicsNode oldNode = children[index]; children[index] = node; // Set the parents of the graphics nodes ((AbstractGraphicsNode) node).setParent(this); ((AbstractGraphicsNode) oldNode).setParent(null); // Set the root of the graphics node ((AbstractGraphicsNode) node).setRoot(this.getRoot()); ((AbstractGraphicsNode) oldNode).setRoot(null); // Invalidates cached values invalidateGeometryCache(); // Create and dispatch events // int id = CompositeGraphicsNodeEvent.GRAPHICS_NODE_REMOVED; // dispatchEvent(new 
CompositeGraphicsNodeEvent(this, id, oldNode)); // id = CompositeGraphicsNodeEvent.GRAPHICS_NODE_ADDED; // dispatchEvent(new CompositeGraphicsNodeEvent(this, id, node)); fireGraphicsNodeChangeCompleted(); return oldNode; } /** * Adds the specified graphics node to this composite graphics node. * * @param o the graphics node to add * @return true (as per the general contract of Collection.add) * @exception IllegalArgumentException if the node is not an * instance of GraphicsNode */ public boolean add(Object o) { // Check for correct argument if (!(o instanceof GraphicsNode)) { throw new IllegalArgumentException(o+" is not a GraphicsNode"); } GraphicsNode node = (GraphicsNode) o; { fireGraphicsNodeChangeStarted(node); } // Reparent the graphics node and tidy up the tree's state if (node.getParent() != null) { node.getParent().getChildren().remove(node); } // Add the graphics node to the children list ensureCapacity(count + 1); // Increments modCount!! children[count++] = node; // Set the parent of the graphics node ((AbstractGraphicsNode) node).setParent(this); // Set the root of the graphics node ((AbstractGraphicsNode) node).setRoot(this.getRoot()); // Invalidates cached values invalidateGeometryCache(); // Create and dispatch event // int id = CompositeGraphicsNodeEvent.GRAPHICS_NODE_ADDED; // dispatchEvent(new CompositeGraphicsNodeEvent(this, id, node)); fireGraphicsNodeChangeCompleted(); return true; } /** * Inserts the specified graphics node at the specified position in this * children list. Shifts the graphics node currently at that position (if * any) and any subsequent graphics nodes to the right (adds one to their * indices). * * @param index the position at which the specified graphics node is to * be inserted. * @param o the graphics node to be inserted. 
* @exception IndexOutOfBoundsException if the index is out of range * @exception IllegalArgumentException if the node is not an * instance of GraphicsNode */ public void add(int index, Object o) { // Check for correct arguments if (!(o instanceof GraphicsNode)) { throw new IllegalArgumentException(o+" is not a GraphicsNode"); } if (index > count || index < 0) { throw new IndexOutOfBoundsException( "Index: "+index+", Size: "+count); } GraphicsNode node = (GraphicsNode) o; { fireGraphicsNodeChangeStarted(node); } // Reparent the graphics node and tidy up the tree's state if (node.getParent() != null) { node.getParent().getChildren().remove(node); } // Insert the node to the children list ensureCapacity(count+1); // Increments modCount!! System.arraycopy(children, index, children, index+1, count-index); children[index] = node; count++; // Set parent of the graphics node ((AbstractGraphicsNode) node).setParent(this); // Set root of the graphics node ((AbstractGraphicsNode) node).setRoot(this.getRoot()); // Invalidates cached values invalidateGeometryCache(); // Create and dispatch event // int id = CompositeGraphicsNodeEvent.GRAPHICS_NODE_ADDED; // dispatchEvent(new CompositeGraphicsNodeEvent(this, id, node)); fireGraphicsNodeChangeCompleted(); } /** * <b>Not supported</b> - * Throws <code>UnsupportedOperationException</code> exception. */ public boolean addAll(Collection c) { throw new UnsupportedOperationException(); } /** * <b>Not supported</b> - * Throws <code>UnsupportedOperationException</code> exception. */ public boolean addAll(int index, Collection c) { throw new UnsupportedOperationException(); } /** * Removes the first instance of the specified graphics node from the children list. 
* * @param o the node the remove * @return true if the children list contains the specified graphics node * @exception IllegalArgumentException if the node is not an * instance of GraphicsNode * @exception IndexOutOfBoundsException when o is not in children list */ public boolean remove(Object o) { // Check for correct argument if (!(o instanceof GraphicsNode)) { throw new IllegalArgumentException(o+" is not a GraphicsNode"); } GraphicsNode node = (GraphicsNode) o; if (node.getParent() != this) { return false; } // Remove the node int index = 0; for (; node != children[index]; index++) { // fires exception when node not found! } remove(index); return true; } /** * Removes the graphics node at the specified position in the children list. * Shifts any subsequent graphics nodes to the left (subtracts one from * their indices). * * @param index the position of the graphics node to remove * @return the graphics node that was removed * @exception IndexOutOfBoundsException if index out of range */ public Object remove(int index) { // Check for correct argument checkRange(index); GraphicsNode oldNode = children[index]; { fireGraphicsNodeChangeStarted(oldNode); } // Remove the node at the specified index modCount++; int numMoved = count - index - 1; if (numMoved > 0) { System.arraycopy(children, index+1, children, index, numMoved); } children[--count] = null; // Let gc do its work if (count == 0) { children = null; } // Set parent of the node ((AbstractGraphicsNode) oldNode).setParent(null); // Set root of the node ((AbstractGraphicsNode) oldNode).setRoot(null); // Invalidates cached values invalidateGeometryCache(); // Create and dispatch event // int id = CompositeGraphicsNodeEvent.GRAPHICS_NODE_REMOVED; // dispatchEvent(new CompositeGraphicsNodeEvent(this, id, oldNode)); fireGraphicsNodeChangeCompleted(); return oldNode; } /** * <b>Not supported</b> - * Throws <code>UnsupportedOperationException</code> exception. 
*/ public boolean removeAll(Collection c) { throw new UnsupportedOperationException(); } /** * <b>Not supported</b> - * Throws <code>UnsupportedOperationException</code> exception. */ public boolean retainAll(Collection c) { throw new UnsupportedOperationException(); } /** * <b>Not supported</b> - * Throws <code>UnsupportedOperationException</code> exception. */ public void clear() { throw new UnsupportedOperationException(); } /** * Returns true if this composite graphics node contains all the graphics * node in the specified collection, false otherwise. * * @param c the collection to be checked for containment */ public boolean containsAll(Collection c) { for (Object aC : c) { if (!contains(aC)) { return false; } } return true; } // Search Operations /** * Returns the index in the children list of the specified graphics node or * -1 if the children list does not contain this graphics node. * * @param node the graphics node to search for */ public int indexOf(Object node) { if (node == null || !(node instanceof GraphicsNode)) { return -1; } if (((GraphicsNode) node).getParent() == this) { int iCount = count; // local is cheaper GraphicsNode[] workList = children; // local is cheaper for (int i = 0; i < iCount; i++) { if (node == workList[ i ]) { return i; } } } return -1; } /** * Returns the index in this children list of the last occurence of the * specified graphics node, or -1 if the list does not contain this graphics * node. * * @param node the graphics node to search for */ public int lastIndexOf(Object node) { if (node == null || !(node instanceof GraphicsNode)) { return -1; } if (((GraphicsNode) node).getParent() == this) { for (int i = count-1; i >= 0; i--) { if (node == children[i]) { return i; } } } return -1; } // List Iterators /** * Returns an iterator over the children of this graphics node. 
 */
    public ListIterator listIterator() {
        return listIterator(0);
    }

    /**
     * Returns an iterator over the children of this graphics node, starting at
     * the specified position in the children list.
     *
     * @param index the index of the first graphics node to return
     *        from the children list
     */
    public ListIterator listIterator(int index) {
        // index == count is legal: it yields an iterator positioned at the end.
        if (index < 0 || index > count) {
            throw new IndexOutOfBoundsException("Index: "+index);
        }
        return new ListItr(index);
    }

    // View

    /**
     * <b>Not supported</b> -
     * Throws <code>UnsupportedOperationException</code> exception.
     */
    public List subList(int fromIndex, int toIndex) {
        throw new UnsupportedOperationException();
    }

    /**
     * Checks if the given index is in range. If not, throws an appropriate
     * runtime exception.
     *
     * @param index the index to check
     */
    private void checkRange(int index) {
        if (index >= count || index < 0) {
            throw new IndexOutOfBoundsException(
                "Index: "+index+", Size: "+count);
        }
    }

    /**
     * Increases the capacity of the children list, if necessary, to ensure that
     * it can hold at least the number of graphics nodes specified by the
     * minimum capacity argument.
     *
     * @param minCapacity the desired minimum capacity.
     */
    public void ensureCapacity(int minCapacity) {
        if (children == null) {
            children = new GraphicsNode[4];
        }
        // NOTE(review): modCount is bumped even when no resize happens, so
        // every call here invalidates live iterators.
        modCount++;
        int oldCapacity = children.length;
        if (minCapacity > oldCapacity) {
            GraphicsNode [] oldData = children;
            // grow by ~1.5x, but never below the requested capacity
            int newCapacity = oldCapacity + oldCapacity/2 + 1;
            if (newCapacity < minCapacity) {
                newCapacity = minCapacity;
            }
            children = new GraphicsNode[newCapacity];
            System.arraycopy(oldData, 0, children, 0, count);
        }
    }

    /**
     * An implementation of the java.util.Iterator interface.
     */
    private class Itr implements Iterator {

        /**
         * Index of graphics node to be returned by subsequent call to next.
         */
        int cursor = 0;

        /**
         * Index of graphics node returned by most recent call to next or
         * previous. Reset to -1 if this graphics node is deleted by a call
         * to remove.
*/ int lastRet = -1; /** * The modCount value that the iterator believes that the backing * List should have. If this expectation is violated, the iterator * has detected concurrent modification. */ int expectedModCount = modCount; public boolean hasNext() { return cursor != count; } public Object next() { try { Object next = get(cursor); checkForComodification(); lastRet = cursor++; return next; } catch(IndexOutOfBoundsException e) { checkForComodification(); throw new NoSuchElementException(); } } public void remove() { if (lastRet == -1) { throw new IllegalStateException(); } checkForComodification(); try { CompositeGraphicsNode.this.remove(lastRet); if (lastRet < cursor) { cursor--; } lastRet = -1; expectedModCount = modCount; } catch(IndexOutOfBoundsException e) { throw new ConcurrentModificationException(); } } final void checkForComodification() { if (modCount != expectedModCount) { throw new ConcurrentModificationException(); } } } /** * An implementation of the java.util.ListIterator interface. 
*/ private class ListItr extends Itr implements ListIterator { ListItr(int index) { cursor = index; } public boolean hasPrevious() { return cursor != 0; } public Object previous() { try { Object previous = get(--cursor); checkForComodification(); lastRet = cursor; return previous; } catch(IndexOutOfBoundsException e) { checkForComodification(); throw new NoSuchElementException(); } } public int nextIndex() { return cursor; } public int previousIndex() { return cursor-1; } public void set(Object o) { if (lastRet == -1) { throw new IllegalStateException(); } checkForComodification(); try { CompositeGraphicsNode.this.set(lastRet, o); expectedModCount = modCount; } catch(IndexOutOfBoundsException e) { throw new ConcurrentModificationException(); } } public void add(Object o) { checkForComodification(); try { CompositeGraphicsNode.this.add(cursor++, o); lastRet = -1; expectedModCount = modCount; } catch(IndexOutOfBoundsException e) { throw new ConcurrentModificationException(); } } } }
googleapis/google-cloud-java
35,522
java-cloudsecuritycompliance/proto-google-cloud-cloudsecuritycompliance-v1/src/main/java/com/google/cloud/cloudsecuritycompliance/v1/CELExpression.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/cloudsecuritycompliance/v1/common.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.cloudsecuritycompliance.v1; /** * * * <pre> * A [CEL * expression](https://cloud.google.com/certificate-authority-service/docs/using-cel). * </pre> * * Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.CELExpression} */ public final class CELExpression extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.cloudsecuritycompliance.v1.CELExpression) CELExpressionOrBuilder { private static final long serialVersionUID = 0L; // Use CELExpression.newBuilder() to construct. 
private CELExpression(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CELExpression() { expression_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CELExpression(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudsecuritycompliance.v1.CommonProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CELExpression_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudsecuritycompliance.v1.CommonProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CELExpression_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudsecuritycompliance.v1.CELExpression.class, com.google.cloud.cloudsecuritycompliance.v1.CELExpression.Builder.class); } private int criteriaCase_ = 0; @SuppressWarnings("serial") private java.lang.Object criteria_; public enum CriteriaCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { RESOURCE_TYPES_VALUES(3), CRITERIA_NOT_SET(0); private final int value; private CriteriaCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static CriteriaCase valueOf(int value) { return forNumber(value); } public static CriteriaCase forNumber(int value) { switch (value) { case 3: return RESOURCE_TYPES_VALUES; case 0: return CRITERIA_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public CriteriaCase getCriteriaCase() { return CriteriaCase.forNumber(criteriaCase_); } public static final int RESOURCE_TYPES_VALUES_FIELD_NUMBER = 3; /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> * * @return Whether the resourceTypesValues field is set. */ @java.lang.Override public boolean hasResourceTypesValues() { return criteriaCase_ == 3; } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> * * @return The resourceTypesValues. */ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.StringList getResourceTypesValues() { if (criteriaCase_ == 3) { return (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_; } return com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. 
* </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.StringListOrBuilder getResourceTypesValuesOrBuilder() { if (criteriaCase_ == 3) { return (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_; } return com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } public static final int EXPRESSION_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object expression_ = ""; /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The expression. */ @java.lang.Override public java.lang.String getExpression() { java.lang.Object ref = expression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); expression_ = s; return s; } } /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for expression. 
*/ @java.lang.Override public com.google.protobuf.ByteString getExpressionBytes() { java.lang.Object ref = expression_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); expression_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, expression_); } if (criteriaCase_ == 3) { output.writeMessage(3, (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, expression_); } if (criteriaCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 3, (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.cloudsecuritycompliance.v1.CELExpression)) { return super.equals(obj); } com.google.cloud.cloudsecuritycompliance.v1.CELExpression other = (com.google.cloud.cloudsecuritycompliance.v1.CELExpression) obj; if (!getExpression().equals(other.getExpression())) return false; if 
(!getCriteriaCase().equals(other.getCriteriaCase())) return false; switch (criteriaCase_) { case 3: if (!getResourceTypesValues().equals(other.getResourceTypesValues())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getExpression().hashCode(); switch (criteriaCase_) { case 3: hash = (37 * hash) + RESOURCE_TYPES_VALUES_FIELD_NUMBER; hash = (53 * hash) + getResourceTypesValues().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.cloudsecuritycompliance.v1.CELExpression prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A [CEL * expression](https://cloud.google.com/certificate-authority-service/docs/using-cel). * </pre> * * Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.CELExpression} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.cloudsecuritycompliance.v1.CELExpression) com.google.cloud.cloudsecuritycompliance.v1.CELExpressionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudsecuritycompliance.v1.CommonProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CELExpression_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudsecuritycompliance.v1.CommonProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CELExpression_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudsecuritycompliance.v1.CELExpression.class, com.google.cloud.cloudsecuritycompliance.v1.CELExpression.Builder.class); } // Construct using com.google.cloud.cloudsecuritycompliance.v1.CELExpression.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; if (resourceTypesValuesBuilder_ != null) { resourceTypesValuesBuilder_.clear(); } expression_ = ""; criteriaCase_ = 0; criteria_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.cloudsecuritycompliance.v1.CommonProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CELExpression_descriptor; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CELExpression getDefaultInstanceForType() { return com.google.cloud.cloudsecuritycompliance.v1.CELExpression.getDefaultInstance(); } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CELExpression build() { com.google.cloud.cloudsecuritycompliance.v1.CELExpression result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CELExpression buildPartial() { com.google.cloud.cloudsecuritycompliance.v1.CELExpression result = new com.google.cloud.cloudsecuritycompliance.v1.CELExpression(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.cloudsecuritycompliance.v1.CELExpression result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.expression_ = expression_; } } private void buildPartialOneofs( com.google.cloud.cloudsecuritycompliance.v1.CELExpression result) { result.criteriaCase_ = criteriaCase_; result.criteria_ = this.criteria_; if (criteriaCase_ == 3 && resourceTypesValuesBuilder_ != null) { result.criteria_ = resourceTypesValuesBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override 
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.cloudsecuritycompliance.v1.CELExpression) { return mergeFrom((com.google.cloud.cloudsecuritycompliance.v1.CELExpression) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.cloudsecuritycompliance.v1.CELExpression other) { if (other == com.google.cloud.cloudsecuritycompliance.v1.CELExpression.getDefaultInstance()) return this; if (!other.getExpression().isEmpty()) { expression_ = other.expression_; bitField0_ |= 0x00000002; onChanged(); } switch (other.getCriteriaCase()) { case RESOURCE_TYPES_VALUES: { mergeResourceTypesValues(other.getResourceTypesValues()); break; } case CRITERIA_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { expression_ = input.readStringRequireUtf8(); 
bitField0_ |= 0x00000002; break; } // case 10 case 26: { input.readMessage( getResourceTypesValuesFieldBuilder().getBuilder(), extensionRegistry); criteriaCase_ = 3; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int criteriaCase_ = 0; private java.lang.Object criteria_; public CriteriaCase getCriteriaCase() { return CriteriaCase.forNumber(criteriaCase_); } public Builder clearCriteria() { criteriaCase_ = 0; criteria_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.StringList, com.google.cloud.cloudsecuritycompliance.v1.StringList.Builder, com.google.cloud.cloudsecuritycompliance.v1.StringListOrBuilder> resourceTypesValuesBuilder_; /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> * * @return Whether the resourceTypesValues field is set. */ @java.lang.Override public boolean hasResourceTypesValues() { return criteriaCase_ == 3; } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> * * @return The resourceTypesValues. 
*/ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.StringList getResourceTypesValues() { if (resourceTypesValuesBuilder_ == null) { if (criteriaCase_ == 3) { return (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_; } return com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } else { if (criteriaCase_ == 3) { return resourceTypesValuesBuilder_.getMessage(); } return com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ public Builder setResourceTypesValues( com.google.cloud.cloudsecuritycompliance.v1.StringList value) { if (resourceTypesValuesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } criteria_ = value; onChanged(); } else { resourceTypesValuesBuilder_.setMessage(value); } criteriaCase_ = 3; return this; } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ public Builder setResourceTypesValues( com.google.cloud.cloudsecuritycompliance.v1.StringList.Builder builderForValue) { if (resourceTypesValuesBuilder_ == null) { criteria_ = builderForValue.build(); onChanged(); } else { resourceTypesValuesBuilder_.setMessage(builderForValue.build()); } criteriaCase_ = 3; return this; } /** * * * <pre> * The resource instance types on which this expression is defined. 
* Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ public Builder mergeResourceTypesValues( com.google.cloud.cloudsecuritycompliance.v1.StringList value) { if (resourceTypesValuesBuilder_ == null) { if (criteriaCase_ == 3 && criteria_ != com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance()) { criteria_ = com.google.cloud.cloudsecuritycompliance.v1.StringList.newBuilder( (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_) .mergeFrom(value) .buildPartial(); } else { criteria_ = value; } onChanged(); } else { if (criteriaCase_ == 3) { resourceTypesValuesBuilder_.mergeFrom(value); } else { resourceTypesValuesBuilder_.setMessage(value); } } criteriaCase_ = 3; return this; } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ public Builder clearResourceTypesValues() { if (resourceTypesValuesBuilder_ == null) { if (criteriaCase_ == 3) { criteriaCase_ = 0; criteria_ = null; onChanged(); } } else { if (criteriaCase_ == 3) { criteriaCase_ = 0; criteria_ = null; } resourceTypesValuesBuilder_.clear(); } return this; } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. 
* </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ public com.google.cloud.cloudsecuritycompliance.v1.StringList.Builder getResourceTypesValuesBuilder() { return getResourceTypesValuesFieldBuilder().getBuilder(); } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. * </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.StringListOrBuilder getResourceTypesValuesOrBuilder() { if ((criteriaCase_ == 3) && (resourceTypesValuesBuilder_ != null)) { return resourceTypesValuesBuilder_.getMessageOrBuilder(); } else { if (criteriaCase_ == 3) { return (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_; } return com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } } /** * * * <pre> * The resource instance types on which this expression is defined. * Format will be of the form : `&lt;canonical service name&gt;/&lt;type&gt;` * Example: `compute.googleapis.com/Instance`. 
* </pre> * * <code>.google.cloud.cloudsecuritycompliance.v1.StringList resource_types_values = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.StringList, com.google.cloud.cloudsecuritycompliance.v1.StringList.Builder, com.google.cloud.cloudsecuritycompliance.v1.StringListOrBuilder> getResourceTypesValuesFieldBuilder() { if (resourceTypesValuesBuilder_ == null) { if (!(criteriaCase_ == 3)) { criteria_ = com.google.cloud.cloudsecuritycompliance.v1.StringList.getDefaultInstance(); } resourceTypesValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.StringList, com.google.cloud.cloudsecuritycompliance.v1.StringList.Builder, com.google.cloud.cloudsecuritycompliance.v1.StringListOrBuilder>( (com.google.cloud.cloudsecuritycompliance.v1.StringList) criteria_, getParentForChildren(), isClean()); criteria_ = null; } criteriaCase_ = 3; onChanged(); return resourceTypesValuesBuilder_; } private java.lang.Object expression_ = ""; /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The expression. */ public java.lang.String getExpression() { java.lang.Object ref = expression_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); expression_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for expression. 
*/ public com.google.protobuf.ByteString getExpressionBytes() { java.lang.Object ref = expression_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); expression_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The expression to set. * @return This builder for chaining. */ public Builder setExpression(java.lang.String value) { if (value == null) { throw new NullPointerException(); } expression_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearExpression() { expression_ = getDefaultInstance().getExpression(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. Logic expression in CEL language. * The max length of the condition is 1000 characters. * </pre> * * <code>string expression = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for expression to set. * @return This builder for chaining. 
*/ public Builder setExpressionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); expression_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.cloudsecuritycompliance.v1.CELExpression) } // @@protoc_insertion_point(class_scope:google.cloud.cloudsecuritycompliance.v1.CELExpression) private static final com.google.cloud.cloudsecuritycompliance.v1.CELExpression DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.cloudsecuritycompliance.v1.CELExpression(); } public static com.google.cloud.cloudsecuritycompliance.v1.CELExpression getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CELExpression> PARSER = new com.google.protobuf.AbstractParser<CELExpression>() { @java.lang.Override public CELExpression parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<CELExpression> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CELExpression> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CELExpression getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/druid
35,638
processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.incremental;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Maps;
import it.unimi.dsi.fastutil.objects.ObjectAVLTreeSet;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.data.input.impl.AggregateProjectionSpec;
import org.apache.druid.error.DruidException;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorAndSize;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.AggregateProjectionMetadata;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.CursorBuildSpec;
import org.apache.druid.segment.DimensionHandler;
import org.apache.druid.segment.DimensionIndexer;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.Metadata;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.projections.Projections;
import org.apache.druid.segment.projections.QueryableProjection;
import org.apache.druid.utils.JvmUtils;

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.SortedSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * {@link IncrementalIndex} implementation that keeps per-row {@link Aggregator} instances on the
 * JVM heap (in {@link #aggregators}), tracking an approximate byte footprint so ingestion can be
 * capped by either row count ({@link #maxRowCount}) or memory ({@link #maxBytesInMemory}).
 */
public class OnheapIncrementalIndex extends IncrementalIndex
{
  private static final Logger log = new Logger(OnheapIncrementalIndex.class);

  /**
   * overhead per {@link ConcurrentSkipListMap.Node} object in facts table
   */
  static final int ROUGH_OVERHEAD_PER_MAP_ENTRY = Long.BYTES * 5 + Integer.BYTES;

  // Row index -> aggregators for that row. Row indices are handed out by indexIncrement.
  private final ConcurrentHashMap<Integer, Aggregator[]> aggregators = new ConcurrentHashMap<>();
  private final FactsHolder facts;
  // Source of monotonically increasing row indices; see getLastRowIndex().
  private final AtomicInteger indexIncrement = new AtomicInteger(0);
  protected final int maxRowCount;
  protected final long maxBytesInMemory;

  /**
   * Aggregator name -> column selector factory for that aggregator.
   */
  @Nullable
  private Map<String, ColumnSelectorFactory> selectors;

  /**
   * Aggregator name -> column selector factory for the combining version of that aggregator. Only set when
   * {@link #preserveExistingMetrics} is true.
   */
  @Nullable
  private Map<String, ColumnSelectorFactory> combiningAggSelectors;

  // Human-readable reason set by canAppendRow() when a limit is hit; see getOutOfRowsReason().
  @Nullable
  private String outOfRowsReason = null;

  // Projection metadata, ordered by AggregateProjectionMetadata.COMPARATOR, plus name -> projection lookup.
  private final SortedSet<AggregateProjectionMetadata> aggregateProjections;
  private final HashMap<String, OnHeapAggregateProjection> projections;

  OnheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      int maxRowCount,
      long maxBytesInMemory,
      // preserveExistingMetrics should only be set true for DruidInputSource since that is the only case where we can
      // have existing metrics. This is currently only used by auto compaction and should not be used for anything else.
      boolean preserveExistingMetrics
  )
  {
    super(incrementalIndexSchema, preserveExistingMetrics);
    this.maxRowCount = maxRowCount;
    // 0 means "no byte limit"; negative values are handled in canAppendRow() (<= 0 skips the size check).
    this.maxBytesInMemory = maxBytesInMemory == 0 ? Long.MAX_VALUE : maxBytesInMemory;
    // Pick the facts holder based on rollup and whether __time is the leading sort column.
    if (incrementalIndexSchema.isRollup()) {
      this.facts = new RollupFactsHolder(dimsComparator(), getDimensions(), timePosition == 0);
    } else if (timePosition == 0) {
      this.facts = new PlainTimeOrderedFactsHolder(dimsComparator());
    } else {
      this.facts = new PlainNonTimeOrderedFactsHolder(dimsComparator());
    }
    this.aggregateProjections = new ObjectAVLTreeSet<>(AggregateProjectionMetadata.COMPARATOR);
    this.projections = new HashMap<>();
    initializeProjections(incrementalIndexSchema);
  }

  /**
   * Builds an {@link OnHeapAggregateProjection} for every projection spec in the schema and registers its
   * metadata. Throws a defensive {@link DruidException} on duplicate projection names.
   */
  private void initializeProjections(IncrementalIndexSchema incrementalIndexSchema)
  {
    for (AggregateProjectionSpec projectionSpec : incrementalIndexSchema.getProjections()) {
      // initialize them all with 0 rows
      AggregateProjectionMetadata.Schema schema = projectionSpec.toMetadataSchema();
      aggregateProjections.add(new AggregateProjectionMetadata(schema, 0));
      if (projections.containsKey(projectionSpec.getName())) {
        throw DruidException.defensive("duplicate projection[%s]", projectionSpec.getName());
      }
      final OnHeapAggregateProjection projection = new OnHeapAggregateProjection(
          projectionSpec,
          this::getDimension,
          // metric name -> base-table AggregatorFactory, or null if the metric is unknown
          metric -> {
            MetricDesc desc = getMetric(metric);
            if (desc != null) {
              return getMetricAggs()[desc.getIndex()];
            }
            return null;
          },
          incrementalIndexSchema.getMinTimestamp()
      );
      projections.put(projectionSpec.getName(), projection);
    }
  }

  @Override
  public FactsHolder getFacts()
  {
    return facts;
  }

  /**
   * Returns the segment metadata, augmented with projection metadata when any projections are defined.
   */
  @Override
  public Metadata getMetadata()
  {
    if (aggregateProjections.isEmpty()) {
      return super.getMetadata();
    }
    final List<AggregateProjectionMetadata> projectionMetadata = projections.values()
                                                                            .stream()
                                                                            .map(OnHeapAggregateProjection::toMetadata)
                                                                            .collect(Collectors.toList());
    return super.getMetadata().withProjections(projectionMetadata);
  }

  /**
   * Populates {@link #selectors} (and {@link #combiningAggSelectors} when {@link #preserveExistingMetrics})
   * with a column selector factory per aggregator, backed by {@code inputRowHolder}.
   */
  @Override
  protected void initAggs(
      final AggregatorFactory[] metrics,
      final InputRowHolder inputRowHolder
  )
  {
    // All non-complex aggregators share a column selector factory. Helps with value reuse.
    ColumnSelectorFactory nonComplexColumnSelectorFactory = null;
    selectors = new HashMap<>();
    combiningAggSelectors = new HashMap<>();
    for (AggregatorFactory agg : metrics) {
      final ColumnSelectorFactory factory;
      if (agg.getIntermediateType().is(ValueType.COMPLEX)) {
        // Complex aggregators each get their own caching factory keyed to their own factory.
        factory = new CachingColumnSelectorFactory(makeColumnSelectorFactory(agg, inputRowHolder));
      } else {
        if (nonComplexColumnSelectorFactory == null) {
          nonComplexColumnSelectorFactory =
              new CachingColumnSelectorFactory(makeColumnSelectorFactory(null, inputRowHolder));
        }
        factory = nonComplexColumnSelectorFactory;
      }
      selectors.put(agg.getName(), factory);
    }
    if (preserveExistingMetrics) {
      // Second set of selectors for the combining variants, used when the input row already carries
      // a pre-aggregated value for the metric (see doAggregate).
      for (AggregatorFactory agg : metrics) {
        final AggregatorFactory combiningAgg = agg.getCombiningFactory();
        final ColumnSelectorFactory factory;
        if (combiningAgg.getIntermediateType().is(ValueType.COMPLEX)) {
          factory = new CachingColumnSelectorFactory(makeColumnSelectorFactory(combiningAgg, inputRowHolder));
        } else {
          if (nonComplexColumnSelectorFactory == null) {
            nonComplexColumnSelectorFactory =
                new CachingColumnSelectorFactory(makeColumnSelectorFactory(null, inputRowHolder));
          }
          factory = nonComplexColumnSelectorFactory;
        }
        combiningAggSelectors.put(combiningAgg.getName(), factory);
      }
    }
  }

  /**
   * Aggregates the held input row into the facts table: existing keys aggregate in place, new keys get a fresh
   * aggregator array and a new row index. Updates the shared bytes-in-memory counter with the size delta.
   */
  @Override
  protected AddToFactsResult addToFacts(
      IncrementalIndexRow key,
      InputRowHolder inputRowHolder
  )
  {
    final List<String> parseExceptionMessages = new ArrayList<>();
    final AtomicLong totalSizeInBytes = getBytesInMemory();
    // add to projections first so if one is chosen by queries the data will always be ahead of the base table since
    // rows are not added atomically to all facts holders at once
    for (OnHeapAggregateProjection projection : projections.values()) {
      projection.addToFacts(key, inputRowHolder.getRow(), parseExceptionMessages, totalSizeInBytes);
    }

    final int priorIndex = facts.getPriorIndex(key);

    Aggregator[] aggs;
    final AggregatorFactory[] metrics = getMetricAggs();
    final AtomicInteger numEntries = getNumEntries();
    if (IncrementalIndexRow.EMPTY_ROW_INDEX != priorIndex) {
      // Rollup hit: aggregate into the existing row's aggregators.
      aggs = aggregators.get(priorIndex);
      long aggSizeDelta = doAggregate(metrics, aggs, inputRowHolder, parseExceptionMessages);
      totalSizeInBytes.addAndGet(aggSizeDelta);
    } else {
      // New key: double-length array when preserving existing metrics (second half holds combining aggregators).
      if (preserveExistingMetrics) {
        aggs = new Aggregator[metrics.length * 2];
      } else {
        aggs = new Aggregator[metrics.length];
      }
      long aggSizeForRow = factorizeAggs(metrics, aggs);
      aggSizeForRow += doAggregate(metrics, aggs, inputRowHolder, parseExceptionMessages);

      final int rowIndex = indexIncrement.getAndIncrement();
      aggregators.put(rowIndex, aggs);
      final int prev = facts.putIfAbsent(key, rowIndex);
      if (IncrementalIndexRow.EMPTY_ROW_INDEX == prev) {
        numEntries.incrementAndGet();
      } else {
        // putIfAbsent lost a race we did not expect after getPriorIndex said the key was absent.
        throw DruidException.defensive("Encountered existing fact entry for new key, possible concurrent add?");
      }

      // For a new key, row size = key size + aggregator size + overhead
      final long estimatedSizeOfAggregators = aggSizeForRow;
      final long rowSize = key.estimateBytesInMemory() + estimatedSizeOfAggregators + ROUGH_OVERHEAD_PER_MAP_ENTRY;
      totalSizeInBytes.addAndGet(rowSize);
    }

    return new AddToFactsResult(numEntries.get(), totalSizeInBytes.get(), parseExceptionMessages);
  }

  @Override
  public int getLastRowIndex()
  {
    // indexIncrement is post-incremented on allocation, so the last handed-out index is current - 1.
    return indexIncrement.get() - 1;
  }

  /**
   * Creates aggregators for the given aggregator factories.
   *
   * @return Total initial size in bytes required by all the aggregators.
   */
  private long factorizeAggs(
      AggregatorFactory[] metrics,
      Aggregator[] aggs
  )
  {
    long totalInitialSizeBytes = 0L;
    // Account for the on-heap reference to each aggregator alongside its own initial size.
    final long aggReferenceSize = Long.BYTES;
    for (int i = 0; i < metrics.length; i++) {
      final AggregatorFactory agg = metrics[i];
      // Creates aggregators to aggregate from input into output fields
      AggregatorAndSize aggregatorAndSize = agg.factorizeWithSize(selectors.get(agg.getName()));
      aggs[i] = aggregatorAndSize.getAggregator();
      totalInitialSizeBytes += aggregatorAndSize.getInitialSizeBytes();
      totalInitialSizeBytes += aggReferenceSize;
      // Creates aggregators to combine already aggregated field
      if (preserveExistingMetrics) {
        AggregatorFactory combiningAgg = agg.getCombiningFactory();
        AggregatorAndSize combiningAggAndSize =
            combiningAgg.factorizeWithSize(combiningAggSelectors.get(combiningAgg.getName()));
        aggs[i + metrics.length] = combiningAggAndSize.getAggregator();
        totalInitialSizeBytes += combiningAggAndSize.getInitialSizeBytes();
        totalInitialSizeBytes += aggReferenceSize;
      }
    }
    return totalInitialSizeBytes;
  }

  /**
   * Performs aggregation for all of the aggregators.
   *
   * @return Total incremental memory in bytes required by this step of the aggregation.
   */
  private long doAggregate(
      AggregatorFactory[] metrics,
      Aggregator[] aggs,
      InputRowHolder inputRowHolder,
      List<String> parseExceptionsHolder
  )
  {
    return doAggregate(metrics, aggs, inputRowHolder, parseExceptionsHolder, preserveExistingMetrics);
  }

  /**
   * Static variant of {@link #doAggregate(AggregatorFactory[], Aggregator[], InputRowHolder, List)}. When
   * {@code preserveExistingMetrics} is set and the input row already contains a value for a metric, the
   * combining aggregator (stored in the second half of {@code aggs}) is used instead of the base one.
   * Parse errors are fatal when preserving existing metrics, otherwise collected into
   * {@code parseExceptionsHolder} and the aggregator is skipped.
   */
  static long doAggregate(
      AggregatorFactory[] metrics,
      Aggregator[] aggs,
      InputRowHolder inputRowHolder,
      List<String> parseExceptionsHolder,
      boolean preserveExistingMetrics
  )
  {
    long totalIncrementalBytes = 0L;
    for (int i = 0; i < metrics.length; i++) {
      final Aggregator agg;
      if (preserveExistingMetrics
          && inputRowHolder.getRow() instanceof MapBasedRow
          && ((MapBasedRow) inputRowHolder.getRow()).getEvent().containsKey(metrics[i].getName())) {
        agg = aggs[i + metrics.length];
      } else {
        agg = aggs[i];
      }
      try {
        totalIncrementalBytes += agg.aggregateWithSize();
      }
      catch (ParseException e) {
        // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
        if (preserveExistingMetrics) {
          log.warn(
              e,
              "Failing ingestion as preserveExistingMetrics is enabled but selector of aggregator[%s] received "
              + "incompatible type.",
              metrics[i].getName()
          );
          throw e;
        } else {
          log.debug(e, "Encountered parse error, skipping aggregator[%s].", metrics[i].getName());
          parseExceptionsHolder.add(e.getMessage());
        }
      }
    }
    return totalIncrementalBytes;
  }

  /**
   * Closes every aggregator created by this index. Wraps any IOException from closing in a RuntimeException.
   */
  private void closeAggregators()
  {
    Closer closer = Closer.create();
    for (Aggregator[] aggs : aggregators.values()) {
      for (Aggregator agg : aggs) {
        closer.register(agg);
      }
    }

    try {
      closer.close();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Finds a projection that can serve the given cursor build spec, or null if none matches.
   */
  @Nullable
  @Override
  public QueryableProjection<IncrementalIndexRowSelector> getProjection(CursorBuildSpec buildSpec)
  {
    return Projections.findMatchingProjection(
        buildSpec,
        aggregateProjections,
        getInterval(),
        // A physical column is usable if the projection defines it as a dimension, or the base table
        // does not have it at all (missing columns are implicitly null).
        (specName, columnName) ->
            projections.get(specName).getDimensionsMap().containsKey(columnName)
            || getColumnCapabilities(columnName) == null,
        projections::get
    );
  }

  @Override
  public IncrementalIndexRowSelector getProjection(String name)
  {
    return projections.get(name);
  }

  /**
   * Returns true while both the row-count and byte limits allow more rows; otherwise records why in
   * {@link #outOfRowsReason}.
   */
  @Override
  public boolean canAppendRow()
  {
    final boolean countCheck = numRows() < maxRowCount;
    // if maxBytesInMemory = -1, then ignore sizeCheck
    final boolean sizeCheck = maxBytesInMemory <= 0 || getBytesInMemory().get() < maxBytesInMemory;
    final boolean canAdd = countCheck && sizeCheck;
    if (!countCheck && !sizeCheck) {
      outOfRowsReason = StringUtils.format(
          "Maximum number of rows [%d] and maximum size in bytes [%d] reached",
          maxRowCount,
          maxBytesInMemory
      );
    } else {
      if (!countCheck) {
        outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount);
      } else if (!sizeCheck) {
        outOfRowsReason = StringUtils.format("Maximum size in bytes [%d] reached", maxBytesInMemory);
      }
    }

    return canAdd;
  }

  @Override
  public String getOutOfRowsReason()
  {
    return outOfRowsReason;
  }

  @Override
  public float getMetricFloatValue(int rowOffset, int aggOffset)
  {
    return ((Number) getMetricHelper(
        getMetricAggs(),
        aggregators.get(rowOffset),
        aggOffset,
        Aggregator::getFloat
    )).floatValue();
  }

  @Override
  public long getMetricLongValue(int rowOffset, int aggOffset)
  {
    return ((Number) getMetricHelper(
        getMetricAggs(),
        aggregators.get(rowOffset),
        aggOffset,
        Aggregator::getLong
    )).longValue();
  }

  @Override
  public double getMetricDoubleValue(int rowOffset, int aggOffset)
  {
    return ((Number) getMetricHelper(
        getMetricAggs(),
        aggregators.get(rowOffset),
        aggOffset,
        Aggregator::getDouble
    )).doubleValue();
  }

  @Override
  public Object getMetricObjectValue(int rowOffset, int aggOffset)
  {
    return getMetricHelper(getMetricAggs(), aggregators.get(rowOffset), aggOffset, Aggregator::get);
  }

  @Override
  public boolean isNull(int rowOffset, int aggOffset)
  {
    final Aggregator[] aggs = aggregators.get(rowOffset);
    if (preserveExistingMetrics) {
      // Null only when both the base aggregator and its combining counterpart saw nothing.
      return aggs[aggOffset].isNull() && aggs[aggOffset + getMetricAggs().length].isNull();
    } else {
      return aggs[aggOffset].isNull();
    }
  }

  /**
   * Iterates rows as {@link MapBasedRow}s (dimensions decoded via each dimension's indexer), with the given
   * post-aggregators computed on top of the metric values.
   */
  @Override
  public Iterable<Row> iterableWithPostAggregations(
      @Nullable final List<PostAggregator> postAggs,
      final boolean descending
  )
  {
    final AggregatorFactory[] metrics = getMetricAggs();
    {
      return () -> {
        final List<DimensionDesc> dimensions = getDimensions();

        return Iterators.transform(
            getFacts().iterator(descending),
            incrementalIndexRow -> {
              final int rowOffset = incrementalIndexRow.getRowIndex();

              Object[] theDims = incrementalIndexRow.getDims();

              Map<String, Object> theVals = Maps.newLinkedHashMap();
              for (int i = 0; i < theDims.length; ++i) {
                Object dim = theDims[i];
                DimensionDesc dimensionDesc = dimensions.get(i);
                if (dimensionDesc == null) {
                  continue;
                }
                String dimensionName = dimensionDesc.getName();
                DimensionHandler handler = dimensionDesc.getHandler();
                if (dim == null || handler.getLengthOfEncodedKeyComponent(dim) == 0) {
                  theVals.put(dimensionName, null);
                  continue;
                }
                final DimensionIndexer indexer = dimensionDesc.getIndexer();
                Object rowVals = indexer.convertUnsortedEncodedKeyComponentToActualList(dim);
                theVals.put(dimensionName, rowVals);
              }

              Aggregator[] aggs = aggregators.get(rowOffset);
              // With preserveExistingMetrics the array is doubled; only the first half is reported.
              int aggLength = preserveExistingMetrics ? aggs.length / 2 : aggs.length;
              for (int i = 0; i < aggLength; ++i) {
                theVals.put(metrics[i].getName(), getMetricHelper(metrics, aggs, i, Aggregator::get));
              }

              if (postAggs != null) {
                for (PostAggregator postAgg : postAggs) {
                  theVals.put(postAgg.getName(), postAgg.compute(theVals));
                }
              }

              return new MapBasedRow(incrementalIndexRow.getTimestamp(), theVals);
            }
        );
      };
    }
  }

  /**
   * Apply the getMetricTypeFunction function to the retrieve aggregated value given the list of aggregators and offset.
* If preserveExistingMetrics flag is set, then this method will combine values from two aggregators, the aggregator * for aggregating from input into output field and the aggregator for combining already aggregated field, as needed */ @Nullable private <T> Object getMetricHelper( AggregatorFactory[] metrics, Aggregator[] aggs, int aggOffset, Function<Aggregator, T> getMetricTypeFunction ) { if (preserveExistingMetrics) { // Since the preserveExistingMetrics flag is set, we will have to check and possibly retrieve the aggregated // values from two aggregators, the aggregator for aggregating from input into output field and the aggregator // for combining already aggregated field if (aggs[aggOffset].isNull()) { // If the aggregator for aggregating from input into output field is null, then we get the value from the // aggregator that we use for combining already aggregated field return getMetricTypeFunction.apply(aggs[aggOffset + metrics.length]); } else if (aggs[aggOffset + metrics.length].isNull()) { // If the aggregator for combining already aggregated field is null, then we get the value from the // aggregator for aggregating from input into output field return getMetricTypeFunction.apply(aggs[aggOffset]); } else { // Since both aggregators is not null and contain values, we will have to retrieve the values from both // aggregators and combine them AggregatorFactory aggregatorFactory = metrics[aggOffset]; T aggregatedFromSource = getMetricTypeFunction.apply(aggs[aggOffset]); T aggregatedFromCombined = getMetricTypeFunction.apply(aggs[aggOffset + metrics.length]); return aggregatorFactory.combine(aggregatedFromSource, aggregatedFromCombined); } } else { // If preserveExistingMetrics flag is not set then we simply get metrics from the list of Aggregator, aggs, // using the given aggOffset return getMetricTypeFunction.apply(aggs[aggOffset]); } } /** * Clear out maps to allow GC * NOTE: This is NOT thread-safe with add... 
so make sure all the adding is DONE before closing */ @Override public void close() { super.close(); closeAggregators(); aggregators.clear(); facts.clear(); if (selectors != null) { selectors.clear(); } if (combiningAggSelectors != null) { combiningAggSelectors.clear(); } } /** * Caches references to selector objects for each column instead of creating a new object each time in order to save * heap space. In general the selectorFactory need not to thread-safe. If required, set concurrentEventAdd to true to * use concurrent hash map instead of vanilla hash map for thread-safe operations. */ static class CachingColumnSelectorFactory implements ColumnSelectorFactory { private final HashMap<String, ColumnValueSelector<?>> columnSelectorMap; private final ColumnSelectorFactory delegate; public CachingColumnSelectorFactory(ColumnSelectorFactory delegate) { this.delegate = delegate; this.columnSelectorMap = new HashMap<>(); } @Override public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) { return delegate.makeDimensionSelector(dimensionSpec); } @Override public ColumnValueSelector<?> makeColumnValueSelector(String columnName) { ColumnValueSelector<?> existing = columnSelectorMap.get(columnName); if (existing != null) { return existing; } // We cannot use columnSelectorMap.computeIfAbsent(columnName, delegate::makeColumnValueSelector) // here since makeColumnValueSelector may modify the columnSelectorMap itself through // virtual column references, triggering a ConcurrentModificationException in JDK 9 and above. ColumnValueSelector<?> columnValueSelector = delegate.makeColumnValueSelector(columnName); existing = columnSelectorMap.putIfAbsent(columnName, columnValueSelector); return existing != null ? 
existing : columnValueSelector; } @Nullable @Override public ColumnCapabilities getColumnCapabilities(String columnName) { return delegate.getColumnCapabilities(columnName); } } public static class Builder extends AppendableIndexBuilder { @Override protected OnheapIncrementalIndex buildInner() { return new OnheapIncrementalIndex( Objects.requireNonNull(incrementalIndexSchema, "incrementIndexSchema is null"), maxRowCount, maxBytesInMemory, preserveExistingMetrics ); } } public static class Spec implements AppendableIndexSpec { private static final boolean DEFAULT_PRESERVE_EXISTING_METRICS = false; public static final String TYPE = "onheap"; // When set to true, for any row that already has metric (with the same name defined in metricSpec), // the metric aggregator in metricSpec is skipped and the existing metric is unchanged. If the row does not already have // the metric, then the metric aggregator is applied on the source column as usual. This should only be set for // DruidInputSource since that is the only case where we can have existing metrics. // This is currently only use by auto compaction and should not be use for anything else. final boolean preserveExistingMetrics; public Spec() { this.preserveExistingMetrics = DEFAULT_PRESERVE_EXISTING_METRICS; } @JsonCreator public Spec( final @JsonProperty("preserveExistingMetrics") @Nullable Boolean preserveExistingMetrics ) { this.preserveExistingMetrics = preserveExistingMetrics != null ? 
preserveExistingMetrics : DEFAULT_PRESERVE_EXISTING_METRICS; } @JsonProperty public boolean isPreserveExistingMetrics() { return preserveExistingMetrics; } @Override public AppendableIndexBuilder builder() { return new Builder().setPreserveExistingMetrics(preserveExistingMetrics); } @Override public long getDefaultMaxBytesInMemory() { // We initially estimated this to be 1/3(max jvm memory), but bytesCurrentlyInMemory only // tracks active index and not the index being flushed to disk, to account for that // we halved default to 1/6(max jvm memory) return JvmUtils.getRuntimeInfo().getMaxHeapSizeBytes() / 6; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Spec spec = (Spec) o; return preserveExistingMetrics == spec.preserveExistingMetrics; } @Override public int hashCode() { return Objects.hash(preserveExistingMetrics); } } static final class RollupFactsHolder implements FactsHolder { // Can't use Set because we need to be able to get from collection private final ConcurrentNavigableMap<IncrementalIndexRow, IncrementalIndexRow> facts; private final List<DimensionDesc> dimensionDescsList; private final boolean timeOrdered; private volatile long minTime = DateTimes.MAX.getMillis(); private volatile long maxTime = DateTimes.MIN.getMillis(); RollupFactsHolder( Comparator<IncrementalIndexRow> incrementalIndexRowComparator, List<DimensionDesc> dimensionDescsList, boolean timeOrdered ) { this.facts = new ConcurrentSkipListMap<>(incrementalIndexRowComparator); this.dimensionDescsList = dimensionDescsList; this.timeOrdered = timeOrdered; } @Override public int getPriorIndex(IncrementalIndexRow key) { IncrementalIndexRow row = facts.get(key); return row == null ? 
IncrementalIndexRow.EMPTY_ROW_INDEX : row.getRowIndex(); } @Override public long getMinTimeMillis() { return minTime; } @Override public long getMaxTimeMillis() { return maxTime; } @Override public Iterator<IncrementalIndexRow> iterator(boolean descending) { if (descending) { return facts.descendingMap() .keySet() .iterator(); } return keySet().iterator(); } @Override public Iterable<IncrementalIndexRow> timeRangeIterable(boolean descending, long timeStart, long timeEnd) { if (timeOrdered) { IncrementalIndexRow start = new IncrementalIndexRow(timeStart, new Object[]{}, dimensionDescsList); IncrementalIndexRow end = new IncrementalIndexRow(timeEnd, new Object[]{}, dimensionDescsList); ConcurrentNavigableMap<IncrementalIndexRow, IncrementalIndexRow> subMap = facts.subMap(start, end); ConcurrentMap<IncrementalIndexRow, IncrementalIndexRow> rangeMap = descending ? subMap.descendingMap() : subMap; return rangeMap.keySet(); } else { return Iterables.filter( facts.keySet(), row -> row.timestamp >= timeStart && row.timestamp < timeEnd ); } } @Override public Iterable<IncrementalIndexRow> keySet() { return facts.keySet(); } @Override public Iterable<IncrementalIndexRow> persistIterable() { // with rollup, facts are already pre-sorted so just return keyset return keySet(); } @Override public int putIfAbsent(IncrementalIndexRow key, int rowIndex) { // setRowIndex() must be called before facts.putIfAbsent() for visibility of rowIndex from concurrent readers. key.setRowIndex(rowIndex); minTime = Math.min(minTime, key.timestamp); maxTime = Math.max(maxTime, key.timestamp); IncrementalIndexRow prev = facts.putIfAbsent(key, key); return prev == null ? 
IncrementalIndexRow.EMPTY_ROW_INDEX : prev.getRowIndex(); } @Override public void clear() { facts.clear(); } } static final class PlainTimeOrderedFactsHolder implements FactsHolder { private final ConcurrentNavigableMap<Long, Deque<IncrementalIndexRow>> facts; private final Comparator<IncrementalIndexRow> incrementalIndexRowComparator; public PlainTimeOrderedFactsHolder(Comparator<IncrementalIndexRow> incrementalIndexRowComparator) { this.facts = new ConcurrentSkipListMap<>(); this.incrementalIndexRowComparator = incrementalIndexRowComparator; } @Override public int getPriorIndex(IncrementalIndexRow key) { // always return EMPTY_ROW_INDEX to indicate that no prior key cause we always add new row return IncrementalIndexRow.EMPTY_ROW_INDEX; } @Override public long getMinTimeMillis() { return facts.firstKey(); } @Override public long getMaxTimeMillis() { return facts.lastKey(); } @Override public Iterator<IncrementalIndexRow> iterator(boolean descending) { if (descending) { return timeOrderedConcat(facts.descendingMap().values(), true).iterator(); } return timeOrderedConcat(facts.values(), false).iterator(); } @Override public Iterable<IncrementalIndexRow> timeRangeIterable(boolean descending, long timeStart, long timeEnd) { ConcurrentNavigableMap<Long, Deque<IncrementalIndexRow>> subMap = facts.subMap(timeStart, timeEnd); final ConcurrentMap<Long, Deque<IncrementalIndexRow>> rangeMap = descending ? subMap.descendingMap() : subMap; return timeOrderedConcat(rangeMap.values(), descending); } private Iterable<IncrementalIndexRow> timeOrderedConcat( final Iterable<Deque<IncrementalIndexRow>> iterable, final boolean descending ) { return () -> Iterators.concat( Iterators.transform( iterable.iterator(), input -> descending ? 
input.descendingIterator() : input.iterator() ) ); } private Stream<IncrementalIndexRow> timeAndDimsOrderedConcat( final Collection<Deque<IncrementalIndexRow>> rowGroups ) { return rowGroups.stream() .flatMap(Collection::stream) .sorted(incrementalIndexRowComparator); } @Override public Iterable<IncrementalIndexRow> keySet() { return timeOrderedConcat(facts.values(), false); } @Override public Iterable<IncrementalIndexRow> persistIterable() { return () -> timeAndDimsOrderedConcat(facts.values()).iterator(); } @Override public int putIfAbsent(IncrementalIndexRow key, int rowIndex) { Long time = key.getTimestamp(); Deque<IncrementalIndexRow> rows = facts.get(time); if (rows == null) { facts.putIfAbsent(time, new ConcurrentLinkedDeque<>()); // in race condition, rows may be put by other thread, so always get latest status from facts rows = facts.get(time); } // setRowIndex() must be called before rows.add() for visibility of rowIndex from concurrent readers. key.setRowIndex(rowIndex); rows.add(key); // always return EMPTY_ROW_INDEX to indicate that we always add new row return IncrementalIndexRow.EMPTY_ROW_INDEX; } @Override public void clear() { facts.clear(); } } static final class PlainNonTimeOrderedFactsHolder implements FactsHolder { private final Deque<IncrementalIndexRow> facts; private final Comparator<IncrementalIndexRow> incrementalIndexRowComparator; private volatile long minTime = DateTimes.MAX.getMillis(); private volatile long maxTime = DateTimes.MIN.getMillis(); public PlainNonTimeOrderedFactsHolder(Comparator<IncrementalIndexRow> incrementalIndexRowComparator) { this.facts = new ArrayDeque<>(); this.incrementalIndexRowComparator = incrementalIndexRowComparator; } @Override public int getPriorIndex(IncrementalIndexRow key) { // always return EMPTY_ROW_INDEX to indicate that no prior key cause we always add new row return IncrementalIndexRow.EMPTY_ROW_INDEX; } @Override public long getMinTimeMillis() { return minTime; } @Override public long 
getMaxTimeMillis() { return maxTime; } @Override public Iterator<IncrementalIndexRow> iterator(boolean descending) { return descending ? facts.descendingIterator() : facts.iterator(); } @Override public Iterable<IncrementalIndexRow> timeRangeIterable(boolean descending, long timeStart, long timeEnd) { return Iterables.filter( () -> iterator(descending), row -> row.timestamp >= timeStart && row.timestamp < timeEnd ); } @Override public Iterable<IncrementalIndexRow> keySet() { return facts; } @Override public Iterable<IncrementalIndexRow> persistIterable() { final List<IncrementalIndexRow> sortedFacts = new ArrayList<>(facts); sortedFacts.sort(incrementalIndexRowComparator); return sortedFacts; } @Override public int putIfAbsent(IncrementalIndexRow key, int rowIndex) { // setRowIndex() must be called before rows.add() for visibility of rowIndex from concurrent readers. key.setRowIndex(rowIndex); minTime = Math.min(minTime, key.timestamp); maxTime = Math.max(maxTime, key.timestamp); facts.add(key); // always return EMPTY_ROW_INDEX to indicate that we always add new row return IncrementalIndexRow.EMPTY_ROW_INDEX; } @Override public void clear() { facts.clear(); } } }
apache/rocketmq
34,924
store/src/main/java/org/apache/rocketmq/store/ha/autoswitch/AutoSwitchHAConnection.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.store.ha.autoswitch; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.util.List; import org.apache.rocketmq.common.ServiceThread; import org.apache.rocketmq.common.constant.LoggerName; import org.apache.rocketmq.common.utils.NetworkUtil; import org.apache.rocketmq.logging.org.slf4j.Logger; import org.apache.rocketmq.logging.org.slf4j.LoggerFactory; import org.apache.rocketmq.remoting.netty.NettySystemConfig; import org.apache.rocketmq.remoting.protocol.EpochEntry; import org.apache.rocketmq.store.SelectMappedBufferResult; import org.apache.rocketmq.store.config.MessageStoreConfig; import org.apache.rocketmq.store.ha.FlowMonitor; import org.apache.rocketmq.store.ha.HAConnection; import org.apache.rocketmq.store.ha.HAConnectionState; import org.apache.rocketmq.store.ha.io.AbstractHAReader; import org.apache.rocketmq.store.ha.io.HAWriter; public class AutoSwitchHAConnection implements HAConnection { /** * Handshake data protocol in syncing msg from master. 
Format: * <pre> * ┌─────────────────┬───────────────┬───────────┬───────────┬────────────────────────────────────┐ * │ current state │ body size │ offset │ epoch │ EpochEntrySize * EpochEntryNums │ * │ (4bytes) │ (4bytes) │ (8bytes) │ (4bytes) │ (12bytes * EpochEntryNums) │ * ├─────────────────┴───────────────┴───────────┴───────────┼────────────────────────────────────┤ * │ Header │ Body │ * │ │ │ * </pre> * Handshake Header protocol Format: * current state + body size + offset + epoch */ public static final int HANDSHAKE_HEADER_SIZE = 4 + 4 + 8 + 4; /** * Transfer data protocol in syncing msg from master. Format: * <pre> * ┌─────────────────┬───────────────┬───────────┬───────────┬─────────────────────┬──────────────────┬──────────────────┐ * │ current state │ body size │ offset │ epoch │ epochStartOffset │ confirmOffset │ log data │ * │ (4bytes) │ (4bytes) │ (8bytes) │ (4bytes) │ (8bytes) │ (8bytes) │ (data size) │ * ├─────────────────┴───────────────┴───────────┴───────────┴─────────────────────┴──────────────────┼──────────────────┤ * │ Header │ Body │ * │ │ │ * </pre> * Transfer Header protocol Format: * current state + body size + offset + epoch + epochStartOffset + additionalInfo(confirmOffset) */ public static final int TRANSFER_HEADER_SIZE = HANDSHAKE_HEADER_SIZE + 8 + 8; public static final int EPOCH_ENTRY_SIZE = 12; private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.STORE_LOGGER_NAME); private final AutoSwitchHAService haService; private final SocketChannel socketChannel; private final String clientAddress; private final EpochFileCache epochCache; private final AbstractWriteSocketService writeSocketService; private final ReadSocketService readSocketService; private final FlowMonitor flowMonitor; private volatile HAConnectionState currentState = HAConnectionState.HANDSHAKE; private volatile long slaveRequestOffset = -1; private volatile long slaveAckOffset = -1; /** * Whether the slave have already sent a handshake message */ private 
volatile boolean isSlaveSendHandshake = false; private volatile int currentTransferEpoch = -1; private volatile long currentTransferEpochEndOffset = 0; private volatile boolean isSyncFromLastFile = false; private volatile boolean isAsyncLearner = false; private volatile long slaveId = -1; /** * Last endOffset when master transfer data to slave */ private volatile long lastMasterMaxOffset = -1; /** * Last time ms when transfer data to slave. */ private volatile long lastTransferTimeMs = 0; public AutoSwitchHAConnection(AutoSwitchHAService haService, SocketChannel socketChannel, EpochFileCache epochCache) throws IOException { this.haService = haService; this.socketChannel = socketChannel; this.epochCache = epochCache; this.clientAddress = this.socketChannel.socket().getRemoteSocketAddress().toString(); this.socketChannel.configureBlocking(false); this.socketChannel.socket().setSoLinger(false, -1); this.socketChannel.socket().setTcpNoDelay(true); if (NettySystemConfig.socketSndbufSize > 0) { this.socketChannel.socket().setReceiveBufferSize(NettySystemConfig.socketSndbufSize); } if (NettySystemConfig.socketRcvbufSize > 0) { this.socketChannel.socket().setSendBufferSize(NettySystemConfig.socketRcvbufSize); } this.writeSocketService = new WriteSocketService(this.socketChannel); this.readSocketService = new ReadSocketService(this.socketChannel); this.haService.getConnectionCount().incrementAndGet(); this.flowMonitor = new FlowMonitor(haService.getDefaultMessageStore().getMessageStoreConfig()); } @Override public void start() { changeCurrentState(HAConnectionState.HANDSHAKE); this.flowMonitor.start(); this.readSocketService.start(); this.writeSocketService.start(); } @Override public void shutdown() { changeCurrentState(HAConnectionState.SHUTDOWN); this.flowMonitor.shutdown(true); this.writeSocketService.shutdown(true); this.readSocketService.shutdown(true); this.close(); } @Override public void close() { if (this.socketChannel != null) { try { this.socketChannel.close(); 
} catch (final IOException e) { LOGGER.error("", e); } } } public void changeCurrentState(HAConnectionState connectionState) { LOGGER.info("change state to {}", connectionState); this.currentState = connectionState; } public long getSlaveId() { return slaveId; } @Override public HAConnectionState getCurrentState() { return currentState; } @Override public SocketChannel getSocketChannel() { return socketChannel; } @Override public String getClientAddress() { return clientAddress; } @Override public long getSlaveAckOffset() { return slaveAckOffset; } @Override public long getTransferredByteInSecond() { return flowMonitor.getTransferredByteInSecond(); } @Override public long getTransferFromWhere() { return this.writeSocketService.getNextTransferFromWhere(); } private void changeTransferEpochToNext(final EpochEntry entry) { this.currentTransferEpoch = entry.getEpoch(); this.currentTransferEpochEndOffset = entry.getEndOffset(); if (entry.getEpoch() == this.epochCache.lastEpoch()) { // Use -1 to stand for Long.max this.currentTransferEpochEndOffset = -1; } } public boolean isAsyncLearner() { return isAsyncLearner; } public boolean isSyncFromLastFile() { return isSyncFromLastFile; } private synchronized void updateLastTransferInfo() { this.lastMasterMaxOffset = this.haService.getDefaultMessageStore().getMaxPhyOffset(); this.lastTransferTimeMs = System.currentTimeMillis(); } private synchronized void maybeExpandInSyncStateSet(long slaveMaxOffset) { if (!this.isAsyncLearner && slaveMaxOffset >= this.lastMasterMaxOffset) { long caughtUpTimeMs = this.haService.getDefaultMessageStore().getMaxPhyOffset() == slaveMaxOffset ? 
System.currentTimeMillis() : this.lastTransferTimeMs; this.haService.updateConnectionLastCaughtUpTime(this.slaveId, caughtUpTimeMs); this.haService.maybeExpandInSyncStateSet(this.slaveId, slaveMaxOffset); } } class ReadSocketService extends ServiceThread { private static final int READ_MAX_BUFFER_SIZE = 1024 * 1024; private final Selector selector; private final SocketChannel socketChannel; private final ByteBuffer byteBufferRead = ByteBuffer.allocate(READ_MAX_BUFFER_SIZE); private final AbstractHAReader haReader; private int processPosition = 0; private volatile long lastReadTimestamp = System.currentTimeMillis(); public ReadSocketService(final SocketChannel socketChannel) throws IOException { this.selector = NetworkUtil.openSelector(); this.socketChannel = socketChannel; this.socketChannel.register(this.selector, SelectionKey.OP_READ); this.setDaemon(true); haReader = new HAServerReader(); haReader.registerHook(readSize -> { if (readSize > 0) { ReadSocketService.this.lastReadTimestamp = haService.getDefaultMessageStore().getSystemClock().now(); } }); } @Override public void run() { LOGGER.info(this.getServiceName() + " service started"); while (!this.isStopped()) { try { this.selector.select(1000); boolean ok = this.haReader.read(this.socketChannel, this.byteBufferRead); if (!ok) { AutoSwitchHAConnection.LOGGER.error("processReadEvent error"); break; } long interval = haService.getDefaultMessageStore().getSystemClock().now() - this.lastReadTimestamp; if (interval > haService.getDefaultMessageStore().getMessageStoreConfig().getHaHousekeepingInterval()) { LOGGER.warn("ha housekeeping, found this connection[" + clientAddress + "] expired, " + interval); break; } } catch (Exception e) { AutoSwitchHAConnection.LOGGER.error(this.getServiceName() + " service has exception.", e); break; } } this.makeStop(); changeCurrentState(HAConnectionState.SHUTDOWN); writeSocketService.makeStop(); haService.removeConnection(AutoSwitchHAConnection.this); 
haService.getConnectionCount().decrementAndGet(); SelectionKey sk = this.socketChannel.keyFor(this.selector); if (sk != null) { sk.cancel(); } try { this.selector.close(); this.socketChannel.close(); } catch (IOException e) { AutoSwitchHAConnection.LOGGER.error("", e); } flowMonitor.shutdown(true); AutoSwitchHAConnection.LOGGER.info(this.getServiceName() + " service end"); } @Override public String getServiceName() { if (haService.getDefaultMessageStore().getBrokerConfig().isInBrokerContainer()) { return haService.getDefaultMessageStore().getBrokerIdentity().getIdentifier() + ReadSocketService.class.getSimpleName(); } return ReadSocketService.class.getSimpleName(); } class HAServerReader extends AbstractHAReader { @Override protected boolean processReadResult(ByteBuffer byteBufferRead) { while (true) { boolean processSuccess = true; int readSocketPos = byteBufferRead.position(); int diff = byteBufferRead.position() - ReadSocketService.this.processPosition; if (diff >= AutoSwitchHAClient.MIN_HEADER_SIZE) { int readPosition = ReadSocketService.this.processPosition; HAConnectionState slaveState = HAConnectionState.values()[byteBufferRead.getInt(readPosition)]; switch (slaveState) { case HANDSHAKE: // SlaveBrokerId Long slaveBrokerId = byteBufferRead.getLong(readPosition + AutoSwitchHAClient.HANDSHAKE_HEADER_SIZE - 8); AutoSwitchHAConnection.this.slaveId = slaveBrokerId; // Flag(isSyncFromLastFile) short syncFromLastFileFlag = byteBufferRead.getShort(readPosition + AutoSwitchHAClient.HANDSHAKE_HEADER_SIZE - 12); if (syncFromLastFileFlag == 1) { AutoSwitchHAConnection.this.isSyncFromLastFile = true; } // Flag(isAsyncLearner role) short isAsyncLearner = byteBufferRead.getShort(readPosition + AutoSwitchHAClient.HANDSHAKE_HEADER_SIZE - 10); if (isAsyncLearner == 1) { AutoSwitchHAConnection.this.isAsyncLearner = true; } isSlaveSendHandshake = true; byteBufferRead.position(readSocketPos); ReadSocketService.this.processPosition += AutoSwitchHAClient.HANDSHAKE_HEADER_SIZE; 
LOGGER.info("Receive slave handshake, slaveBrokerId:{}, isSyncFromLastFile:{}, isAsyncLearner:{}", AutoSwitchHAConnection.this.slaveId, AutoSwitchHAConnection.this.isSyncFromLastFile, AutoSwitchHAConnection.this.isAsyncLearner); break; case TRANSFER: long slaveMaxOffset = byteBufferRead.getLong(readPosition + 4); ReadSocketService.this.processPosition += AutoSwitchHAClient.TRANSFER_HEADER_SIZE; AutoSwitchHAConnection.this.slaveAckOffset = slaveMaxOffset; if (slaveRequestOffset < 0) { slaveRequestOffset = slaveMaxOffset; } byteBufferRead.position(readSocketPos); maybeExpandInSyncStateSet(slaveMaxOffset); AutoSwitchHAConnection.this.haService.updateConfirmOffsetWhenSlaveAck(AutoSwitchHAConnection.this.slaveId); AutoSwitchHAConnection.this.haService.notifyTransferSome(AutoSwitchHAConnection.this.slaveAckOffset); break; default: LOGGER.error("Current state illegal {}", currentState); return false; } if (!slaveState.equals(currentState)) { LOGGER.warn("Master change state from {} to {}", currentState, slaveState); changeCurrentState(slaveState); } if (processSuccess) { continue; } } if (!byteBufferRead.hasRemaining()) { byteBufferRead.position(ReadSocketService.this.processPosition); byteBufferRead.compact(); ReadSocketService.this.processPosition = 0; } break; } return true; } } } class WriteSocketService extends AbstractWriteSocketService { private SelectMappedBufferResult selectMappedBufferResult; public WriteSocketService(final SocketChannel socketChannel) throws IOException { super(socketChannel); } @Override protected int getNextTransferDataSize() { SelectMappedBufferResult selectResult = haService.getDefaultMessageStore().getCommitLogData(this.nextTransferFromWhere); if (selectResult == null || selectResult.getSize() <= 0) { return 0; } this.selectMappedBufferResult = selectResult; return selectResult.getSize(); } @Override protected void releaseData() { this.selectMappedBufferResult.release(); this.selectMappedBufferResult = null; } @Override protected boolean 
transferData(int maxTransferSize) throws Exception {
    // maxTransferSize < 0 (i.e. -1) keeps the body buffer's current limit, so a
    // partially written body from the previous round resumes where it left off.
    if (null != this.selectMappedBufferResult && maxTransferSize >= 0) {
        this.selectMappedBufferResult.getByteBuffer().limit(maxTransferSize);
    }

    // Write Header
    boolean result = haWriter.write(this.socketChannel, this.byteBufferHeader);
    if (!result) {
        return false;
    }

    // No body buffer selected: header-only transfer (e.g. heartbeat).
    if (null == this.selectMappedBufferResult) {
        return true;
    }

    // Write Body
    result = haWriter.write(this.socketChannel, this.selectMappedBufferResult.getByteBuffer());
    if (result) {
        releaseData();
    }
    return result;
}

@Override
protected void onStop() {
    // Release any mapped buffer still held when the service stops, so the
    // underlying mapped file is not pinned after shutdown.
    if (this.selectMappedBufferResult != null) {
        this.selectMappedBufferResult.release();
    }
}

@Override
public String getServiceName() {
    // In broker-container mode, prefix the broker identity so service/thread
    // names stay distinguishable between brokers sharing one process.
    if (haService.getDefaultMessageStore().getBrokerConfig().isInBrokerContainer()) {
        return haService.getDefaultMessageStore().getBrokerIdentity().getIdentifier() + WriteSocketService.class.getSimpleName();
    }
    return WriteSocketService.class.getSimpleName();
}
}

/**
 * Master-side write-service skeleton for the auto-switch HA connection. Owns the
 * selector/channel, the transfer header buffer and the handshake buffer, and drives
 * the HANDSHAKE/TRANSFER state machine in {@link #run()}. Concrete subclasses supply
 * the commit-log data to ship via the abstract hooks at the bottom of the class.
 */
abstract class AbstractWriteSocketService extends ServiceThread {
    protected final Selector selector;
    protected final SocketChannel socketChannel;
    protected final HAWriter haWriter;
    protected final ByteBuffer byteBufferHeader = ByteBuffer.allocate(TRANSFER_HEADER_SIZE);
    // Store master epochFileCache: (Epoch + startOffset) * 1000
    private final ByteBuffer handShakeBuffer = ByteBuffer.allocate(EPOCH_ENTRY_SIZE * 1000);
    // Next commit-log offset to transfer; -1 until initialized from the slave's request.
    protected long nextTransferFromWhere = -1;
    // Whether the previous header+body write completed (false => resume with transferData(-1)).
    protected boolean lastWriteOver = true;
    protected long lastWriteTimestamp = System.currentTimeMillis();
    protected long lastPrintTimestamp = System.currentTimeMillis();
    protected long transferOffset = 0;

    public AbstractWriteSocketService(final SocketChannel socketChannel) throws IOException {
        this.selector = NetworkUtil.openSelector();
        this.socketChannel = socketChannel;
        this.socketChannel.register(this.selector, SelectionKey.OP_WRITE);
        this.setDaemon(true);
        haWriter = new HAWriter();
        // Feed every write into the flow monitor, and refresh the timestamp used
        // by the heartbeat-interval check whenever bytes actually went out.
        haWriter.registerHook(writeSize -> {
            flowMonitor.addByteCountTransferred(writeSize);
            if (writeSize > 0) {
                AbstractWriteSocketService.this.lastWriteTimestamp =
                    haService.getDefaultMessageStore().getSystemClock().now();
            }
        });
    }

    public long getNextTransferFromWhere() {
        return this.nextTransferFromWhere;
    }

    /**
     * Fills the header buffer (state, body size, max phy offset, last epoch) and the
     * handshake body buffer with every epoch entry. Always returns true.
     */
    private boolean buildHandshakeBuffer() {
        final List<EpochEntry> epochEntries = AutoSwitchHAConnection.this.epochCache.getAllEntries();
        final int lastEpoch = AutoSwitchHAConnection.this.epochCache.lastEpoch();
        final long maxPhyOffset = AutoSwitchHAConnection.this.haService.getDefaultMessageStore().getMaxPhyOffset();
        this.byteBufferHeader.position(0);
        this.byteBufferHeader.limit(HANDSHAKE_HEADER_SIZE);
        // State
        this.byteBufferHeader.putInt(currentState.ordinal());
        // Body size
        this.byteBufferHeader.putInt(epochEntries.size() * EPOCH_ENTRY_SIZE);
        // Offset
        this.byteBufferHeader.putLong(maxPhyOffset);
        // Epoch
        this.byteBufferHeader.putInt(lastEpoch);
        this.byteBufferHeader.flip();

        // EpochEntries
        this.handShakeBuffer.position(0);
        this.handShakeBuffer.limit(EPOCH_ENTRY_SIZE * epochEntries.size());
        for (final EpochEntry entry : epochEntries) {
            if (entry != null) {
                this.handShakeBuffer.putInt(entry.getEpoch());
                this.handShakeBuffer.putLong(entry.getStartOffset());
            }
        }
        this.handShakeBuffer.flip();
        LOGGER.info("Master build handshake header: maxEpoch:{}, maxOffset:{}, epochEntries:{}",
            lastEpoch, maxPhyOffset, epochEntries);
        return true;
    }

    private boolean handshakeWithSlave() throws IOException {
        // Write Header
        boolean result = this.haWriter.write(this.socketChannel, this.byteBufferHeader);
        if (!result) {
            return false;
        }
        // Write Body
        return this.haWriter.write(this.socketChannel, this.handShakeBuffer);
    }

    // Normal transfer method
    private void buildTransferHeaderBuffer(long nextOffset, int bodySize) {
        EpochEntry entry =
            AutoSwitchHAConnection.this.epochCache.getEntry(AutoSwitchHAConnection.this.currentTransferEpoch);
        if (entry == null) {
            // If broker is started on empty disk and no message entered
            // (nextOffset = -1 and currentTransferEpoch = -1),
            // do not output error log when sending heartbeat
            if (nextOffset != -1 || currentTransferEpoch != -1 || bodySize > 0) {
                LOGGER.error("Failed to find epochEntry with epoch {} when build msg header",
                    AutoSwitchHAConnection.this.currentTransferEpoch);
            }
            if (bodySize > 0) {
                return;
            }
            // Maybe it's used for heartbeat
            entry = AutoSwitchHAConnection.this.epochCache.firstEntry();
        }
        // Build Header
        this.byteBufferHeader.position(0);
        this.byteBufferHeader.limit(TRANSFER_HEADER_SIZE);
        // State
        this.byteBufferHeader.putInt(currentState.ordinal());
        // Body size
        this.byteBufferHeader.putInt(bodySize);
        // Offset
        this.byteBufferHeader.putLong(nextOffset);
        // Epoch
        this.byteBufferHeader.putInt(entry.getEpoch());
        // EpochStartOffset
        this.byteBufferHeader.putLong(entry.getStartOffset());
        // Additional info(confirm offset)
        final long confirmOffset =
            AutoSwitchHAConnection.this.haService.getDefaultMessageStore().getConfirmOffset();
        this.byteBufferHeader.putLong(confirmOffset);
        this.byteBufferHeader.flip();
    }

    /**
     * Sends a header-only heartbeat (body size 0) when no write happened within the
     * configured heartbeat interval. Returns whether the write (if any) completed.
     */
    private boolean sendHeartbeatIfNeeded() throws Exception {
        long interval = haService.getDefaultMessageStore().getSystemClock().now() - this.lastWriteTimestamp;
        if (interval > haService.getDefaultMessageStore().getMessageStoreConfig().getHaSendHeartbeatInterval()) {
            buildTransferHeaderBuffer(this.nextTransferFromWhere, 0);
            return this.transferData(0);
        }
        return true;
    }

    private void transferToSlave() throws Exception {
        if (this.lastWriteOver) {
            this.lastWriteOver = sendHeartbeatIfNeeded();
        } else {
            // maxTransferSize == -1 means to continue transfer remaining data.
            this.lastWriteOver = this.transferData(-1);
        }
        if (!this.lastWriteOver) {
            return;
        }

        int size = this.getNextTransferDataSize();
        if (size > 0) {
            // Cap the batch at the configured transfer batch size first...
            if (size > haService.getDefaultMessageStore().getMessageStoreConfig().getHaTransferBatchSize()) {
                size = haService.getDefaultMessageStore().getMessageStoreConfig().getHaTransferBatchSize();
            }
            // ...then at what the HA flow monitor allows for this second.
            int canTransferMaxBytes = flowMonitor.canTransferMaxByteNum();
            if (size > canTransferMaxBytes) {
                // Rate-limit the warning to once per second.
                if (System.currentTimeMillis() - lastPrintTimestamp > 1000) {
                    LOGGER.warn("Trigger HA flow control, max transfer speed {}KB/s, current speed: {}KB/s",
                        String.format("%.2f", flowMonitor.maxTransferByteInSecond() / 1024.0),
                        String.format("%.2f", flowMonitor.getTransferredByteInSecond() / 1024.0));
                    lastPrintTimestamp = System.currentTimeMillis();
                }
                size = canTransferMaxBytes;
            }
            if (size <= 0) {
                this.releaseData();
                this.waitForRunning(100);
                return;
            }

            // Check and update currentTransferEpochEndOffset
            if (AutoSwitchHAConnection.this.currentTransferEpochEndOffset == -1) {
                EpochEntry currentEpochEntry =
                    AutoSwitchHAConnection.this.epochCache.getEntry(AutoSwitchHAConnection.this.currentTransferEpoch);
                if (currentEpochEntry != null) {
                    if (currentEpochEntry.getEndOffset() != EpochEntry.LAST_EPOCH_END_OFFSET) {
                        LOGGER.info("Update currentTransferEpochEndOffset from -1 to {}", currentEpochEntry.getEndOffset());
                        AutoSwitchHAConnection.this.currentTransferEpochEndOffset = currentEpochEntry.getEndOffset();
                    }
                } else {
                    // we should never reach here
                    LOGGER.warn("[BUG]Can't find currentTransferEpoch [{}] from epoch cache", currentTransferEpoch);
                }
            }

            // We must ensure that the transmitted logs are within the same epoch
            // If currentEpochEndOffset == -1, means that currentTransferEpoch = last epoch, so the endOffset = Long.max
            final long currentEpochEndOffset = AutoSwitchHAConnection.this.currentTransferEpochEndOffset;
            if (currentEpochEndOffset != -1 && this.nextTransferFromWhere + size > currentEpochEndOffset) {
                final EpochEntry epochEntry =
                    AutoSwitchHAConnection.this.epochCache.nextEntry(AutoSwitchHAConnection.this.currentTransferEpoch);
                if (epochEntry == null) {
                    LOGGER.error("Can't find a bigger epochEntry than epoch {}",
                        AutoSwitchHAConnection.this.currentTransferEpoch);
                    waitForRunning(100);
                    return;
                }
                // Truncate this batch at the epoch boundary, then advance to the next epoch.
                size = (int) (currentEpochEndOffset - this.nextTransferFromWhere);
                changeTransferEpochToNext(epochEntry);
            }

            this.transferOffset = this.nextTransferFromWhere;
            this.nextTransferFromWhere += size;
            updateLastTransferInfo();

            // Build Header
            buildTransferHeaderBuffer(this.transferOffset, size);

            this.lastWriteOver = this.transferData(size);
        } else {
            // If size == 0, we should update the lastCatchupTimeMs
            AutoSwitchHAConnection.this.haService.updateConnectionLastCaughtUpTime(
                AutoSwitchHAConnection.this.slaveId, System.currentTimeMillis());
            haService.getWaitNotifyObject().allWaitForRunning(100);
        }
    }

    @Override
    public void run() {
        AutoSwitchHAConnection.LOGGER.info(this.getServiceName() + " service started");

        while (!this.isStopped()) {
            try {
                this.selector.select(1000);
                switch (currentState) {
                    case HANDSHAKE:
                        // Wait until the slave send it handshake msg to master.
                        if (!isSlaveSendHandshake) {
                            this.waitForRunning(10);
                            continue;
                        }
                        if (this.lastWriteOver) {
                            if (!buildHandshakeBuffer()) {
                                LOGGER.error("AutoSwitchHAConnection build handshake buffer failed");
                                this.waitForRunning(5000);
                                continue;
                            }
                        }
                        this.lastWriteOver = handshakeWithSlave();
                        if (this.lastWriteOver) {
                            // change flag to {false} to wait for slave notification
                            isSlaveSendHandshake = false;
                        }
                        break;
                    case TRANSFER:
                        if (-1 == slaveRequestOffset) {
                            this.waitForRunning(10);
                            continue;
                        }
                        if (-1 == this.nextTransferFromWhere) {
                            if (0 == slaveRequestOffset) {
                                // We must ensure that the starting point of syncing log
                                // must be the startOffset of a file (maybe the last file, or the minOffset)
                                final MessageStoreConfig config = haService.getDefaultMessageStore().getMessageStoreConfig();
                                if (AutoSwitchHAConnection.this.isSyncFromLastFile) {
                                    // Align the start offset down to a mapped-file boundary.
                                    long masterOffset = haService.getDefaultMessageStore().getCommitLog().getMaxOffset();
                                    masterOffset = masterOffset - (masterOffset % config.getMappedFileSizeCommitLog());
                                    if (masterOffset < 0) {
                                        masterOffset = 0;
                                    }
                                    this.nextTransferFromWhere = masterOffset;
                                } else {
                                    this.nextTransferFromWhere = haService.getDefaultMessageStore().getCommitLog().getMinOffset();
                                }
                            } else {
                                this.nextTransferFromWhere = slaveRequestOffset;
                            }
                            // nextTransferFromWhere is not found.
                            // It may be empty disk and no message is entered
                            if (this.nextTransferFromWhere == -1) {
                                sendHeartbeatIfNeeded();
                                waitForRunning(500);
                                break;
                            }
                            // Setup initial transferEpoch
                            EpochEntry epochEntry =
                                AutoSwitchHAConnection.this.epochCache.findEpochEntryByOffset(this.nextTransferFromWhere);
                            if (epochEntry == null) {
                                LOGGER.error("Failed to find an epochEntry to match nextTransferFromWhere {}",
                                    this.nextTransferFromWhere);
                                sendHeartbeatIfNeeded();
                                waitForRunning(500);
                                break;
                            }
                            changeTransferEpochToNext(epochEntry);
                            LOGGER.info("Master transfer data to slave {}, from offset:{}, currentEpoch:{}",
                                AutoSwitchHAConnection.this.clientAddress, this.nextTransferFromWhere, epochEntry);
                        }
                        transferToSlave();
                        break;
                    default:
                        throw new Exception("unexpected state " + currentState);
                }
            } catch (Exception e) {
                AutoSwitchHAConnection.LOGGER.error(this.getServiceName() + " service has exception.", e);
                break;
            }
        }

        // Tear-down: stop both services, deregister the connection, close channel/selector.
        this.onStop();

        changeCurrentState(HAConnectionState.SHUTDOWN);

        this.makeStop();

        readSocketService.makeStop();

        haService.removeConnection(AutoSwitchHAConnection.this);

        SelectionKey sk = this.socketChannel.keyFor(this.selector);
        if (sk != null) {
            sk.cancel();
        }

        try {
            this.selector.close();
            this.socketChannel.close();
        } catch (IOException e) {
            AutoSwitchHAConnection.LOGGER.error("", e);
        }

        flowMonitor.shutdown(true);

        AutoSwitchHAConnection.LOGGER.info(this.getServiceName() + " service end");
    }

    // Subclass hooks: how many bytes are ready to ship.
    abstract protected int getNextTransferDataSize();

    // Release any data resources held for the current batch.
    abstract protected void releaseData();

    // Ship up to maxTransferSize bytes (-1 resumes a partial write); true when complete.
    abstract protected boolean transferData(int maxTransferSize) throws Exception;

    // Cleanup performed when the service stops.
    abstract protected void onStop();
}
}
apache/iotdb
35,790
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/consensus/statemachine/schemaregion/SchemaExecutionVisitor.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.db.consensus.statemachine.schemaregion; import org.apache.iotdb.common.rpc.thrift.TSStatus; import org.apache.iotdb.commons.conf.IoTDBConstant; import org.apache.iotdb.commons.consensus.SchemaRegionId; import org.apache.iotdb.commons.exception.MetadataException; import org.apache.iotdb.commons.path.MeasurementPath; import org.apache.iotdb.commons.path.PartialPath; import org.apache.iotdb.commons.schema.view.viewExpression.ViewExpression; import org.apache.iotdb.db.exception.metadata.MeasurementAlreadyExistException; import org.apache.iotdb.db.exception.metadata.template.TemplateIsInUseException; import org.apache.iotdb.db.pipe.agent.PipeDataNodeAgent; import org.apache.iotdb.db.pipe.source.schemaregion.SchemaRegionListeningQueue; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanVisitor; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.WritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.ActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.AlterTimeSeriesNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.BatchActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.ConstructSchemaBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateAlignedTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateMultiTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.DeactivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.DeleteTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalBatchActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalCreateMultiTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalCreateTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.MeasurementGroup; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.PreDeactivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.RollbackPreDeactivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.RollbackSchemaBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.AlterLogicalViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.ConstructLogicalViewBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.CreateLogicalViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.DeleteLogicalViewNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.RollbackLogicalViewBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedNonWritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedWritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeOperateSchemaQueueNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.ConstructTableDevicesBlackListNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.CreateOrUpdateTableDeviceNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.DeleteTableDeviceNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.DeleteTableDevicesInBlackListNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.RollbackTableDevicesBlackListNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableAttributeColumnDropNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceAttributeCommitUpdateNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceAttributeUpdateNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableNodeLocationAddNode; import org.apache.iotdb.db.schemaengine.schemaregion.ISchemaRegion; import org.apache.iotdb.db.schemaengine.schemaregion.write.req.ICreateAlignedTimeSeriesPlan; import org.apache.iotdb.db.schemaengine.schemaregion.write.req.ICreateTimeSeriesPlan; import org.apache.iotdb.db.schemaengine.schemaregion.write.req.SchemaRegionWritePlanFactory; import org.apache.iotdb.db.schemaengine.schemaregion.write.req.impl.CreateAlignedTimeSeriesPlanImpl; import org.apache.iotdb.db.schemaengine.schemaregion.write.req.impl.CreateTimeSeriesPlanImpl; import org.apache.iotdb.db.schemaengine.template.ClusterTemplateManager; import 
org.apache.iotdb.db.schemaengine.template.Template; import org.apache.iotdb.rpc.RpcUtils; import org.apache.iotdb.rpc.TSStatusCode; import org.apache.tsfile.enums.TSDataType; import org.apache.tsfile.file.metadata.enums.CompressionType; import org.apache.tsfile.file.metadata.enums.TSEncoding; import org.apache.tsfile.utils.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; /** Schema write {@link PlanNode} visitor */ public class SchemaExecutionVisitor extends PlanVisitor<TSStatus, ISchemaRegion> { private static final Logger logger = LoggerFactory.getLogger(SchemaExecutionVisitor.class); @Override public TSStatus visitCreateTimeSeries( final CreateTimeSeriesNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.createTimeSeries(node, -1); } catch (final MetadataException e) { logger.error("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully"); } @Override public TSStatus visitCreateAlignedTimeSeries( final CreateAlignedTimeSeriesNode node, final ISchemaRegion schemaRegion) { try { if (node.isGeneratedByPipe()) { final ICreateAlignedTimeSeriesPlan plan = SchemaRegionWritePlanFactory.getCreateAlignedTimeSeriesPlan( node.getDevicePath(), node.getMeasurements(), node.getDataTypes(), node.getEncodings(), node.getCompressors(), node.getAliasList(), node.getTagsList(), node.getAttributesList()); ((CreateAlignedTimeSeriesPlanImpl) plan).setWithMerge(true); schemaRegion.createAlignedTimeSeries(plan); } else { schemaRegion.createAlignedTimeSeries(node); } } catch (final MetadataException e) { logger.error("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, 
"Execute successfully"); } @Override public TSStatus visitCreateMultiTimeSeries( final CreateMultiTimeSeriesNode node, final ISchemaRegion schemaRegion) { final Map<PartialPath, MeasurementGroup> measurementGroupMap = node.getMeasurementGroupMap(); final List<TSStatus> failingStatus = new ArrayList<>(); PartialPath devicePath; MeasurementGroup measurementGroup; int size; for (final Map.Entry<PartialPath, MeasurementGroup> entry : measurementGroupMap.entrySet()) { devicePath = entry.getKey(); measurementGroup = entry.getValue(); size = measurementGroup.getMeasurements().size(); // todo implement batch creation of one device in SchemaRegion for (int i = 0; i < size; i++) { try { final ICreateTimeSeriesPlan createTimeSeriesPlan = transformToCreateTimeSeriesPlan(devicePath, measurementGroup, i); ((CreateTimeSeriesPlanImpl) createTimeSeriesPlan).setWithMerge(node.isGeneratedByPipe()); schemaRegion.createTimeSeries(createTimeSeriesPlan, -1); } catch (final MetadataException e) { logger.error("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e); failingStatus.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage())); } } } if (!failingStatus.isEmpty()) { return RpcUtils.getStatus(failingStatus); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully"); } private ICreateTimeSeriesPlan transformToCreateTimeSeriesPlan( final PartialPath devicePath, final MeasurementGroup measurementGroup, final int index) { return SchemaRegionWritePlanFactory.getCreateTimeSeriesPlan( devicePath.concatAsMeasurementPath(measurementGroup.getMeasurements().get(index)), measurementGroup.getDataTypes().get(index), measurementGroup.getEncodings().get(index), measurementGroup.getCompressors().get(index), measurementGroup.getPropsList() == null ? null : measurementGroup.getPropsList().get(index), measurementGroup.getTagsList() == null ? null : measurementGroup.getTagsList().get(index), measurementGroup.getAttributesList() == null ? 
null : measurementGroup.getAttributesList().get(index), measurementGroup.getAliasList() == null ? null : measurementGroup.getAliasList().get(index)); } @Override public TSStatus visitInternalCreateTimeSeries( final InternalCreateTimeSeriesNode node, final ISchemaRegion schemaRegion) { final PartialPath devicePath = node.getDevicePath(); final MeasurementGroup measurementGroup = node.getMeasurementGroup(); final List<TSStatus> alreadyExistingTimeSeries = new ArrayList<>(); final List<TSStatus> failingStatus = new ArrayList<>(); if (node.isAligned()) { executeInternalCreateAlignedTimeSeries( devicePath, measurementGroup, schemaRegion, alreadyExistingTimeSeries, failingStatus, node.isGeneratedByPipe()); } else { executeInternalCreateTimeSeries( devicePath, measurementGroup, schemaRegion, alreadyExistingTimeSeries, failingStatus, node.isGeneratedByPipe()); } if (!failingStatus.isEmpty()) { return RpcUtils.getStatus(failingStatus); } if (!alreadyExistingTimeSeries.isEmpty()) { return RpcUtils.getStatus(alreadyExistingTimeSeries); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully"); } @Override public TSStatus visitInternalCreateMultiTimeSeries( final InternalCreateMultiTimeSeriesNode node, final ISchemaRegion schemaRegion) { PartialPath devicePath; MeasurementGroup measurementGroup; final List<TSStatus> alreadyExistingTimeSeries = new ArrayList<>(); final List<TSStatus> failingStatus = new ArrayList<>(); for (final Map.Entry<PartialPath, Pair<Boolean, MeasurementGroup>> deviceEntry : node.getDeviceMap().entrySet()) { devicePath = deviceEntry.getKey(); measurementGroup = deviceEntry.getValue().right; if (Boolean.TRUE.equals(deviceEntry.getValue().left)) { executeInternalCreateAlignedTimeSeries( devicePath, measurementGroup, schemaRegion, alreadyExistingTimeSeries, failingStatus, node.isGeneratedByPipe()); } else { executeInternalCreateTimeSeries( devicePath, measurementGroup, schemaRegion, alreadyExistingTimeSeries, failingStatus, 
node.isGeneratedByPipe()); } } if (!failingStatus.isEmpty()) { return RpcUtils.getStatus(failingStatus); } if (!alreadyExistingTimeSeries.isEmpty()) { return RpcUtils.getStatus(alreadyExistingTimeSeries); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully"); } private void executeInternalCreateTimeSeries( final PartialPath devicePath, final MeasurementGroup measurementGroup, final ISchemaRegion schemaRegion, final List<TSStatus> alreadyExistingTimeSeries, final List<TSStatus> failingStatus, final boolean withMerge) { final int size = measurementGroup.getMeasurements().size(); // todo implement batch creation of one device in SchemaRegion for (int i = 0; i < size; i++) { try { final ICreateTimeSeriesPlan createTimeSeriesPlan = transformToCreateTimeSeriesPlan(devicePath, measurementGroup, i); // With merge is only true for pipe to upsert the receiver alias/tags/attributes in // historical transfer. // For normal internal creation, the alias/tags/attributes are not set // Thus the original ones are not altered ((CreateTimeSeriesPlanImpl) createTimeSeriesPlan).setWithMerge(withMerge); schemaRegion.createTimeSeries(createTimeSeriesPlan, -1); } catch (final MeasurementAlreadyExistException e) { // There's no need to internal create time series. 
alreadyExistingTimeSeries.add( RpcUtils.getStatus( e.getErrorCode(), MeasurementPath.transformDataToString(e.getMeasurementPath()))); } catch (final MetadataException e) { logger.warn("{}: MetaData error: ", e.getMessage(), e); failingStatus.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage())); } } } private void executeInternalCreateAlignedTimeSeries( final PartialPath devicePath, final MeasurementGroup measurementGroup, final ISchemaRegion schemaRegion, final List<TSStatus> alreadyExistingTimeSeries, final List<TSStatus> failingStatus, final boolean withMerge) { final List<String> measurementList = measurementGroup.getMeasurements(); final List<TSDataType> dataTypeList = measurementGroup.getDataTypes(); final List<TSEncoding> encodingList = measurementGroup.getEncodings(); final List<CompressionType> compressionTypeList = measurementGroup.getCompressors(); final List<String> aliasList = measurementGroup.getAliasList(); final List<Map<String, String>> tagsList = measurementGroup.getTagsList(); final List<Map<String, String>> attributesList = measurementGroup.getAttributesList(); final ICreateAlignedTimeSeriesPlan createAlignedTimeSeriesPlan = SchemaRegionWritePlanFactory.getCreateAlignedTimeSeriesPlan( devicePath, measurementList, dataTypeList, encodingList, compressionTypeList, aliasList, tagsList, attributesList); // With merge is only true for pipe to upsert the receiver alias/tags/attributes in historical // transfer. // For normal internal creation, the alias/tags/attributes are not set // Thus the original ones are not altered ((CreateAlignedTimeSeriesPlanImpl) createAlignedTimeSeriesPlan).setWithMerge(withMerge); boolean shouldRetry = true; while (shouldRetry) { try { schemaRegion.createAlignedTimeSeries(createAlignedTimeSeriesPlan); shouldRetry = false; } catch (final MeasurementAlreadyExistException e) { // The existence check will be executed before truly creation // There's no need to internal create time series. 
final MeasurementPath measurementPath = e.getMeasurementPath(); alreadyExistingTimeSeries.add( RpcUtils.getStatus( e.getErrorCode(), MeasurementPath.transformDataToString(e.getMeasurementPath()))); // remove the existing time series from plan final int index = measurementList.indexOf(measurementPath.getMeasurement()); measurementList.remove(index); dataTypeList.remove(index); encodingList.remove(index); compressionTypeList.remove(index); if (Objects.nonNull(aliasList)) { aliasList.remove(index); } if (Objects.nonNull(tagsList)) { tagsList.remove(index); } if (Objects.nonNull(attributesList)) { attributesList.remove(index); } // If with merge is set, the lists are deep copied and need to be altered here. // We still remove the element from the original list to help cascading pipe transfer // schema. // If this exception is thrown, the measurements, data types, etc. must be unchanged. // Thus, the index for the copied lists are identical to that in the original lists. if (withMerge) { createAlignedTimeSeriesPlan.getMeasurements().remove(index); createAlignedTimeSeriesPlan.getDataTypes().remove(index); createAlignedTimeSeriesPlan.getEncodings().remove(index); createAlignedTimeSeriesPlan.getCompressors().remove(index); if (Objects.nonNull(aliasList)) { createAlignedTimeSeriesPlan.getAliasList().remove(index); } if (Objects.nonNull(tagsList)) { createAlignedTimeSeriesPlan.getTagsList().remove(index); } if (Objects.nonNull(attributesList)) { createAlignedTimeSeriesPlan.getAttributesList().remove(index); } } if (measurementList.isEmpty()) { shouldRetry = false; } } catch (final MetadataException e) { logger.warn("{}: MetaData error: ", e.getMessage(), e); failingStatus.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage())); shouldRetry = false; } } } @Override public TSStatus visitAlterTimeSeries( final AlterTimeSeriesNode node, final ISchemaRegion schemaRegion) { try { switch (node.getAlterType()) { case RENAME: String beforeName = 
node.getAlterMap().keySet().iterator().next(); String currentName = node.getAlterMap().get(beforeName); schemaRegion.renameTagOrAttributeKey(beforeName, currentName, node.getPath()); break; case SET: schemaRegion.setTagsOrAttributesValue(node.getAlterMap(), node.getPath()); break; case DROP: schemaRegion.dropTagsOrAttributes(node.getAlterMap().keySet(), node.getPath()); break; case ADD_TAGS: schemaRegion.addTags(node.getAlterMap(), node.getPath()); break; case ADD_ATTRIBUTES: schemaRegion.addAttributes(node.getAlterMap(), node.getPath()); break; case UPSERT: schemaRegion.upsertAliasAndTagsAndAttributes( node.getAlias(), node.getTagsMap(), node.getAttributesMap(), node.getPath()); break; } } catch (MetadataException e) { logger.error("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } catch (IOException e) { logger.error("{}: IO error: ", IoTDBConstant.GLOBAL_DB_NAME, e); return RpcUtils.getStatus(TSStatusCode.INTERNAL_SERVER_ERROR, e.getMessage()); } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully"); } @Override public TSStatus visitActivateTemplate( final ActivateTemplateNode node, final ISchemaRegion schemaRegion) { try { final Template template = ClusterTemplateManager.getInstance().getTemplate(node.getTemplateId()); if (Objects.isNull(template)) { return new TSStatus(TSStatusCode.UNDEFINED_TEMPLATE.getStatusCode()) .setMessage( "The template is null when trying to activate template, may be the template is being unset."); } schemaRegion.activateSchemaTemplate(node, template); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitBatchActivateTemplate( final BatchActivateTemplateNode node, final ISchemaRegion schemaRegion) { final List<TSStatus> statusList = new ArrayList<>(); final List<PartialPath> 
alreadyActivatedDeviceList = new ArrayList<>(); for (final Map.Entry<PartialPath, Pair<Integer, Integer>> entry : node.getTemplateActivationMap().entrySet()) { final Template template = ClusterTemplateManager.getInstance().getTemplate(entry.getValue().left); if (Objects.isNull(template)) { return new TSStatus(TSStatusCode.UNDEFINED_TEMPLATE.getStatusCode()) .setMessage( "The template is null when trying to activate template, may be the template is being unset."); } try { schemaRegion.activateSchemaTemplate( SchemaRegionWritePlanFactory.getActivateTemplateInClusterPlan( entry.getKey(), entry.getValue().right, entry.getValue().left), template); } catch (final MetadataException e) { if (e.getErrorCode() == TSStatusCode.TEMPLATE_IS_IN_USE.getStatusCode()) { alreadyActivatedDeviceList.add(entry.getKey()); } else { logger.error(e.getMessage(), e); statusList.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage())); } } } if (!alreadyActivatedDeviceList.isEmpty()) { final TemplateIsInUseException e = new TemplateIsInUseException(alreadyActivatedDeviceList.toString()); logger.error(e.getMessage(), e); statusList.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage())); } return statusList.isEmpty() ? 
RpcUtils.SUCCESS_STATUS : RpcUtils.getStatus(statusList); } @Override public TSStatus visitInternalBatchActivateTemplate( final InternalBatchActivateTemplateNode node, final ISchemaRegion schemaRegion) { for (final Map.Entry<PartialPath, Pair<Integer, Integer>> entry : node.getTemplateActivationMap().entrySet()) { final Template template = ClusterTemplateManager.getInstance().getTemplate(entry.getValue().left); if (Objects.isNull(template)) { return new TSStatus(TSStatusCode.UNDEFINED_TEMPLATE.getStatusCode()) .setMessage( "The template is null when trying to activate template, may be the template is being unset."); } try { schemaRegion.activateSchemaTemplate( SchemaRegionWritePlanFactory.getActivateTemplateInClusterPlan( entry.getKey(), entry.getValue().right, entry.getValue().left), template); } catch (final TemplateIsInUseException e) { logger.info( String.format( "Device Template has already been activated on path %s, there's no need to activate again.", entry.getKey())); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } @Override public TSStatus visitConstructSchemaBlackList( final ConstructSchemaBlackListNode node, final ISchemaRegion schemaRegion) { try { final Pair<Long, Boolean> preDeletedNumAndIsAllLogicalView = schemaRegion.constructSchemaBlackList(node.getPatternTree()); return RpcUtils.getStatus( Boolean.TRUE.equals(preDeletedNumAndIsAllLogicalView.getRight()) ? 
TSStatusCode.ONLY_LOGICAL_VIEW : TSStatusCode.SUCCESS_STATUS, String.valueOf(preDeletedNumAndIsAllLogicalView.getLeft())); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitRollbackSchemaBlackList( final RollbackSchemaBlackListNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.rollbackSchemaBlackList(node.getPatternTree()); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitDeleteTimeseries( final DeleteTimeSeriesNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.deleteTimeseriesInBlackList(node.getPatternTree()); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitPreDeactivateTemplate( final PreDeactivateTemplateNode node, final ISchemaRegion schemaRegion) { try { return RpcUtils.getStatus( TSStatusCode.SUCCESS_STATUS, String.valueOf(schemaRegion.constructSchemaBlackListWithTemplate(node))); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitRollbackPreDeactivateTemplate( final RollbackPreDeactivateTemplateNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.rollbackSchemaBlackListWithTemplate(node); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitDeactivateTemplate( final DeactivateTemplateNode node, final ISchemaRegion schemaRegion) { try { 
// Continuation: body of visitDeactivateTemplate begun in the previous chunk.
schemaRegion.deactivateTemplateInBlackList(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS);
    } catch (MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /**
   * Creates one logical view per (target path, source expression) entry.
   *
   * <p>Best-effort batch: a failing entry is logged and its status collected, but
   * remaining entries are still attempted. Returns an aggregate status when any
   * entry failed, otherwise SUCCESS.
   */
  @Override
  public TSStatus visitCreateLogicalView(
      final CreateLogicalViewNode node, final ISchemaRegion schemaRegion) {
    final Map<PartialPath, ViewExpression> viewPathToSourceMap =
        node.getViewPathToSourceExpressionMap();
    final List<TSStatus> failingStatus = new ArrayList<>();
    for (final Map.Entry<PartialPath, ViewExpression> entry : viewPathToSourceMap.entrySet()) {
      try {
        schemaRegion.createLogicalView(
            SchemaRegionWritePlanFactory.getCreateLogicalViewPlan(
                entry.getKey(), entry.getValue()));
      } catch (final MetadataException e) {
        logger.error("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e);
        failingStatus.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage()));
      }
    }
    if (!failingStatus.isEmpty()) {
      // Multi-status aggregate so the caller can see every failing entry.
      return RpcUtils.getStatus(failingStatus);
    }
    return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully");
  }

  /**
   * Alters existing logical views, one per (view path, source expression) entry.
   * Same best-effort batch semantics as {@code visitCreateLogicalView}, but
   * failures are logged at WARN.
   */
  @Override
  public TSStatus visitAlterLogicalView(
      final AlterLogicalViewNode node, final ISchemaRegion schemaRegion) {
    final Map<PartialPath, ViewExpression> viewPathToSourceMap = node.getViewPathToSourceMap();
    final List<TSStatus> failingStatus = new ArrayList<>();
    for (final Map.Entry<PartialPath, ViewExpression> entry : viewPathToSourceMap.entrySet()) {
      try {
        schemaRegion.alterLogicalView(
            SchemaRegionWritePlanFactory.getAlterLogicalViewPlan(entry.getKey(), entry.getValue()));
      } catch (final MetadataException e) {
        logger.warn("{}: MetaData error: ", IoTDBConstant.GLOBAL_DB_NAME, e);
        failingStatus.add(RpcUtils.getStatus(e.getErrorCode(), e.getMessage()));
      }
    }
    if (!failingStatus.isEmpty()) {
      return RpcUtils.getStatus(failingStatus);
    }
    return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, "Execute successfully");
  }

  /**
   * Black-lists (pre-deletes) logical views matching a pattern tree.
   * (Parameter list continues in the next chunk.)
   */
  @Override
  public TSStatus visitConstructLogicalViewBlackList(
      final
ConstructLogicalViewBlackListNode node, final ISchemaRegion schemaRegion) { try { return RpcUtils.getStatus( TSStatusCode.SUCCESS_STATUS, String.valueOf(schemaRegion.constructLogicalViewBlackList(node.getPatternTree()))); } catch (MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitRollbackLogicalViewBlackList( final RollbackLogicalViewBlackListNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.rollbackLogicalViewBlackList(node.getPatternTree()); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitDeleteLogicalView(DeleteLogicalViewNode node, ISchemaRegion schemaRegion) { try { schemaRegion.deleteLogicalView(node.getPatternTree()); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitCreateOrUpdateTableDevice( final CreateOrUpdateTableDeviceNode node, final ISchemaRegion schemaRegion) { try { // todo implement storage for device of diverse data types schemaRegion.createOrUpdateTableDevice(node); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitTableDeviceAttributeUpdate( final TableDeviceAttributeUpdateNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.updateTableDeviceAttribute(node); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitTableDeviceAttributeCommit( final 
// Continuation: parameter list of visitTableDeviceAttributeCommit begun in the
// previous chunk.
TableDeviceAttributeCommitUpdateNode node, final ISchemaRegion schemaRegion) {
    try {
      schemaRegion.commitUpdateAttribute(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS);
    } catch (final MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /** Records an added node location for table-device metadata in this schema region. */
  @Override
  public TSStatus visitTableNodeLocationAdd(
      final TableNodeLocationAddNode node, final ISchemaRegion schemaRegion) {
    try {
      schemaRegion.addNodeLocation(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS);
    } catch (final MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /** Deletes table devices described by the node. */
  @Override
  public TSStatus visitDeleteTableDevice(
      final DeleteTableDeviceNode node, final ISchemaRegion schemaRegion) {
    try {
      schemaRegion.deleteTableDevice(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS);
    } catch (final MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /** Drops an attribute column from a table's device schema. */
  @Override
  public TSStatus visitTableAttributeColumnDrop(
      final TableAttributeColumnDropNode node, final ISchemaRegion schemaRegion) {
    try {
      schemaRegion.dropTableAttribute(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS);
    } catch (final MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /**
   * Black-lists (pre-deletes) table devices; the status message carries the number
   * of pre-deleted devices.
   */
  @Override
  public TSStatus visitConstructTableDevicesBlackList(
      final ConstructTableDevicesBlackListNode node, final ISchemaRegion schemaRegion) {
    try {
      final long preDeletedNum = schemaRegion.constructTableDevicesBlackList(node);
      return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS, String.valueOf(preDeletedNum));
    } catch (final MetadataException e) {
      logger.error(e.getMessage(), e);
      return RpcUtils.getStatus(e.getErrorCode(), e.getMessage());
    }
  }

  /**
   * Rolls back a table-devices black list.
   * (Parameter list continues in the next chunk.)
   */
  @Override
  public TSStatus visitRollbackTableDevicesBlackList(
      final RollbackTableDevicesBlackListNode node, final ISchemaRegion
schemaRegion) { try { schemaRegion.rollbackTableDevicesBlackList(node); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitDeleteTableDevicesInBlackList( final DeleteTableDevicesInBlackListNode node, final ISchemaRegion schemaRegion) { try { schemaRegion.deleteTableDevicesInBlackList(node); return RpcUtils.getStatus(TSStatusCode.SUCCESS_STATUS); } catch (final MetadataException e) { logger.error(e.getMessage(), e); return RpcUtils.getStatus(e.getErrorCode(), e.getMessage()); } } @Override public TSStatus visitPipeEnrichedWritePlanNode( final PipeEnrichedWritePlanNode node, final ISchemaRegion schemaRegion) { final WritePlanNode innerNode = node.getWritePlanNode(); innerNode.markAsGeneratedByPipe(); return innerNode.accept(this, schemaRegion); } @Override public TSStatus visitPipeEnrichedNonWritePlanNode( final PipeEnrichedNonWritePlanNode node, final ISchemaRegion schemaRegion) { final PlanNode innerNode = node.getNonWritePlanNode(); innerNode.markAsGeneratedByPipe(); return node.getNonWritePlanNode().accept(this, schemaRegion); } @Override public TSStatus visitPipeOperateSchemaQueueNode( final PipeOperateSchemaQueueNode node, final ISchemaRegion schemaRegion) { final SchemaRegionId id = schemaRegion.getSchemaRegionId(); final SchemaRegionListeningQueue queue = PipeDataNodeAgent.runtime().schemaListener(id); if (node.isOpen() && !queue.isOpened()) { logger.info("Opened pipe listening queue on schema region {}", id); queue.open(); } else if (!node.isOpen() && queue.isOpened()) { logger.info("Closed pipe listening queue on schema region {}", id); queue.close(); } return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode()); } @Override public TSStatus visitPlan(PlanNode node, ISchemaRegion context) { return null; } }
googleapis/google-cloud-java
35,437
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListExampleStoresRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/example_store_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [ExampleStoreService.ListExampleStores][google.cloud.aiplatform.v1beta1.ExampleStoreService.ListExampleStores]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListExampleStoresRequest} */ public final class ListExampleStoresRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) ListExampleStoresRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListExampleStoresRequest.newBuilder() to construct. 
private ListExampleStoresRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListExampleStoresRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListExampleStoresRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExampleStoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListExampleStoresRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExampleStoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListExampleStoresRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.class, com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. 
* Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * Optional. The standard list page size. 
* </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, 
filter_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest other = (com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 
* hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public 
static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ExampleStoreService.ListExampleStores][google.cloud.aiplatform.v1beta1.ExampleStoreService.ListExampleStores]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListExampleStoresRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ExampleStoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListExampleStoresRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ExampleStoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListExampleStoresRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.class, com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ExampleStoreServiceProto 
.internal_static_google_cloud_aiplatform_v1beta1_ListExampleStoresRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest build() { com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest result = new com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 
0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to list the ExampleStores from. * Format: * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). 
* </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The standard list filter. * More detail in [AIP-160](https://google.aip.dev/160). * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. The standard list page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListExampleStoresRequest) private static final com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest(); } public static com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListExampleStoresRequest> PARSER = new 
com.google.protobuf.AbstractParser<ListExampleStoresRequest>() { @java.lang.Override public ListExampleStoresRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListExampleStoresRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListExampleStoresRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListExampleStoresRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/harmony
35,640
classlib/modules/swing/src/test/api/java.injected/javax/swing/text/View_ChangesTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Alexey A. Ivanov */ package javax.swing.text; import java.awt.Rectangle; import java.awt.Shape; import java.util.ArrayList; import javax.swing.BasicSwingTestCase; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.event.DocumentEvent.ElementChange; import javax.swing.event.DocumentEvent.EventType; import javax.swing.text.CompositeView_ModelViewTest.ChildView; import javax.swing.text.CompositeView_ModelViewTest.WithChildrenView; import javax.swing.text.ViewTest.DisAbstractedView; import javax.swing.text.ViewTestHelpers.ElementPartView; import javax.swing.undo.UndoableEdit; import junit.framework.TestCase; /** * Tests changes-related methods of <code>View</code> class. * */ public class View_ChangesTest extends TestCase { /** * Class overriding some methods to test View behaviour in respect * to {insert,remove,changed}Update. */ private class ChangeView extends WithChildrenView { /** * The last allocation returned from * <code>getChildAllocation</code>. 
*/ Shape childAllocation = null; public ChangeView(final Element element) { super(element); loadChildren(viewFactory); viewsCreatedElements.clear(); replaceViews = null; } @Override public Shape getChildAllocation(final int index, final Shape shape) { if (!hasChildren) { fail("getChildAllocation is not supposed to be " + "called when there are no children"); } return childAllocation = super.getChildAllocation(index, shape); } /** * Returns a child from <code>children</code> or forwards * to <code>super</code> depending on state of flag * <code>hasChildren</code>. */ @Override public View getView(final int index) { if (hasChildren) { return super.getView(index); } return null; } /** * Returns <code>children.length</code> or forwards to * <code>super</code> depending on state of flag * <code>hasChildren</code>. */ @Override public int getViewCount() { if (hasChildren) { return super.getViewCount(); } return 0; } @Override public void replace(final int index, final int length, final View[] views) { replaceIndex = index; replaceLength = length; replaceViews = views; super.replace(index, length, views); } /** * Just a security check: <code>setParent</code> isn't called. */ @Override public void setParent(final View parent) { super.setParent(parent); fail("setParent is not supposed to be called"); } /** * Check <code>forwardUpdate</code> parameters which is * called from <code>insertUpdate</code>, * <code>removeUpdate</code>, or <code>changedUpdate</code>. 
*/ @Override protected void forwardUpdate(final ElementChange change, final DocumentEvent event, final Shape shape, final ViewFactory factory) { forwardUpdateCalled = true; if (updateChildrenReturn) { assertSame(event.getChange(root), change); } else { assertNull(change); } assertSame(docEvent, event); assertSame(rect, shape); assertSame(viewFactory, factory); super.forwardUpdate(change, event, shape, factory); } /** * Check <code>forwardUpdateToView</code> parameters which is * called from <code>insertUpdate</code>, * <code>removeUpdate</code>, or <code>changedUpdate</code>. */ @Override protected void forwardUpdateToView(final View view, final DocumentEvent event, final Shape shape, final ViewFactory factory) { forwardUpdateToViewCalled = true; viewsForwardedTo.add(view); assertSame(docEvent, event); assertSame(childAllocation, shape); assertSame(viewFactory, factory); super.forwardUpdateToView(view, event, shape, factory); } /** * Check <code>forwardUpdate</code> parameters which is * called from <code>insertUpdate</code>, * <code>removeUpdate</code>, or <code>changedUpdate</code>. */ @Override protected boolean updateChildren(final ElementChange change, final DocumentEvent event, final ViewFactory factory) { updateChildrenCalled = true; assertSame(event.getChange(root), change); assertSame(docEvent, event); assertSame(viewFactory, factory); assertTrue(super.updateChildren(change, event, factory)); assertFalse(forwardUpdateCalled); assertFalse(updateLayoutCalled); return updateChildrenReturn; } /** * Check <code>updateLayout</code> parameters which is called * from <code>insertUpdate</code>, <code>removeUpdate</code>, * or <code>changedUpdate</code>. 
*/ @Override protected void updateLayout(final ElementChange change, final DocumentEvent event, final Shape shape) { updateLayoutCalled = true; if (updateChildrenReturn) { assertSame(event.getChange(root), change); } else { assertNull(change); } assertSame(docEvent, event); assertSame(rect, shape); super.updateLayout(change, event, shape); } } /** * View allocation (Shape parameter). */ private static final Rectangle rect = new Rectangle(20, 20); private Document doc; /** * The event used to test the functionality. */ private DocumentEvent docEvent; private boolean forwardUpdateCalled; private boolean forwardUpdateToViewCalled; /** * Flag which controls whether anonymous test-view has children or not. */ private boolean hasChildren; private Element line; /** * Index of the first child where change happens (in call to replace). */ private int replaceIndex; /** * Number of elements to remove (in call to replace). */ private int replaceLength; /** * Views to add (in call to replace). */ private View[] replaceViews; /** * The root element where changes in document are tracked. */ private Element root; private boolean updateChildrenCalled; /** * Return value from updateChildren for anonymous test-view. */ private boolean updateChildrenReturn; private boolean updateLayoutCalled; private View view; /** * The view factory used in tests. */ private ViewFactory viewFactory; /** * List of elements for which new views were created. */ private ArrayList<Element> viewsCreatedElements = new ArrayList<Element>(); /** * List of views for which forwardUpdateToView was called. */ private ArrayList<View> viewsForwardedTo = new ArrayList<View>(); /** * Creates document event with type of <code>CHANGE</code>. 
*/ public void createChangeEvent() throws BadLocationException { doc.insertString(doc.getLength(), "one\ntwo\n", null); view.removeAll(); ((CompositeView) view).loadChildren(viewFactory); viewsCreatedElements.clear(); replaceViews = null; ElementChange change = docEvent.getChange(doc.getDefaultRootElement()); docEvent = ((AbstractDocument) doc).new DefaultDocumentEvent(docEvent.getLength(), docEvent.getOffset(), EventType.CHANGE); ((AbstractDocument.DefaultDocumentEvent) docEvent).addEdit((UndoableEdit) change); } /** * The view has <i>no</i> children, and <code>updateChildren</code> * is <i>not</i> called as well as other methods involved. */ public void testChangedUpdate01() throws BadLocationException { createChangeEvent(); hasChildren = false; assertEquals(0, view.getViewCount()); view.changedUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertFalse(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertFalse(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>false</code>. */ public void testChangedUpdate02() throws BadLocationException { hasChildren = true; createChangeEvent(); updateChildrenReturn = false; assertEquals(4, view.getViewCount()); view.changedUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 + 2, 1); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(3, viewsForwardedTo.size()); // to all children for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); } assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>true</code>. 
*/ public void testChangedUpdate03() throws BadLocationException { hasChildren = true; createChangeEvent(); assertEquals(1, docEvent.getChange(root).getIndex()); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.changedUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 + 2, 1); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertTrue(updateLayoutCalled); } /** * As if attributes are changed in the range 7-18: * the second paragraph (6-15), and * the third one (15, 19). * <code>updateChilren</code> returns <code>true</code> * (child views represent entire elements). */ public void testChangedUpdate04() throws BadLocationException { hasChildren = true; changeDocument(); updateChildrenReturn = true; Element prevLastLine = root.getElement(root.getElementCount() - 2); docEvent = ((AbstractDocument) doc).new DefaultDocumentEvent(line.getStartOffset() + 1, prevLastLine.getEndOffset() - 2 - line.getStartOffset(), EventType.CHANGE); view.changedUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(2, viewsForwardedTo.size()); for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); } assertTrue(updateLayoutCalled); } /** * As if attributes are changed in the range 7-18: * the second paragraph (6-15), and * the third one (15, 19). * <code>updateChilren</code> returns <code>false</code> * (child views represent entire elements). 
*/ public void testChangedUpdate05() throws BadLocationException { hasChildren = true; changeDocument(); updateChildrenReturn = false; Element prevLastLine = root.getElement(root.getElementCount() - 2); docEvent = ((AbstractDocument) doc).new DefaultDocumentEvent(line.getStartOffset() + 1, prevLastLine.getEndOffset() - 2 - line.getStartOffset(), EventType.CHANGE); view.changedUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(2, viewsForwardedTo.size()); for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); } assertTrue(updateLayoutCalled); } /** * Tests <code>forwardUpdateToView</code> whether it calls * {insert,remove,changed}Update depending on event type. */ public void testForwardUpdateToView() { // Class to store which function is called class Params { boolean change = false; boolean insert = false; boolean remove = false; } final Params params = new Params(); view = new DisAbstractedView(line); View child = new DisAbstractedView(root.getElement(0)) { @Override public void changedUpdate(final DocumentEvent event, final Shape shape, final ViewFactory factory) { params.change = true; } @Override public void insertUpdate(final DocumentEvent event, final Shape shape, final ViewFactory factory) { params.insert = true; } @Override public void removeUpdate(final DocumentEvent event, final Shape shape, final ViewFactory factory) { params.remove = true; } }; view.forwardUpdateToView(child, ((AbstractDocument) doc).new DefaultDocumentEvent(0, 0, EventType.INSERT), rect, viewFactory); assertTrue(params.insert); params.insert = false; assertFalse(params.remove); params.remove = false; assertFalse(params.change); params.change = false; view.forwardUpdateToView(child, ((AbstractDocument) doc).new DefaultDocumentEvent(0, 0, EventType.REMOVE), rect, viewFactory); assertFalse(params.insert); params.insert = 
false; assertTrue(params.remove); params.remove = false; assertFalse(params.change); params.change = false; view.forwardUpdateToView(child, ((AbstractDocument) doc).new DefaultDocumentEvent(0, 0, EventType.CHANGE), rect, viewFactory); assertFalse(params.insert); params.insert = false; assertFalse(params.remove); params.remove = false; assertTrue(params.change); params.change = false; view.forwardUpdateToView(child, ((AbstractDocument) doc).new DefaultDocumentEvent(0, 0, null), rect, viewFactory); assertFalse(params.insert); params.insert = false; assertFalse(params.remove); params.remove = false; if (BasicSwingTestCase.isHarmony()) { assertFalse(params.change); params.change = false; } else { assertTrue(params.change); params.change = false; } } /** * The view has <i>no</i> children, and <code>updateChildren</code> * is <i>not</i> called. */ public void testInsertUpdate01() throws BadLocationException { doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); hasChildren = false; assertEquals(0, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertFalse(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertFalse(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>false</code>. (Views may represent parts of an Element.) 
*/ public void testInsertUpdate02() throws BadLocationException { doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); hasChildren = true; updateChildrenReturn = false; assertEquals(2, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(2 + 2, 1); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(4 - 1, viewsForwardedTo.size()); // first elem not affected for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); } assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>true</code>. (Views represent entire Elements.) */ public void testInsertUpdate03() throws BadLocationException { doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); hasChildren = true; updateChildrenReturn = true; assertEquals(2, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(2 + 2, 1); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertTrue(updateLayoutCalled); } /** * Insert text so that structure changes occur at index of 2, while * <code>updateChildren</code> returns <code>true</code>. The result * is that changes must be forwarded to the first two view children. 
*/ public void testInsertUpdate04() throws BadLocationException { hasChildren = true; changeDocument(); // Event method will be tested upon doc.insertString(doc.getLength(), "insert4", null); assertEquals(2, docEvent.getChange(root).getIndex()); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 + 0, 2); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertTrue(updateLayoutCalled); } /** * No structural changes occurred to the <code>root</code> element. * <code>updateChildren</code> must not be called in this case. */ public void testInsertUpdate05() throws BadLocationException { doc.insertString(line.getStartOffset() + 2, "one", null); // This should not cause any line map restructure assertNull(docEvent.getChange(root)); hasChildren = true; updateChildrenReturn = true; assertEquals(2, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(1, viewsForwardedTo.size()); assertSame(view.getView(1), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * <code>viewFactory</code> parameter is <code>null</code>. 
*/ public void testInsertUpdate06() throws BadLocationException { doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); hasChildren = true; updateChildrenReturn = true; assertEquals(2, view.getViewCount()); try { view.insertUpdate(docEvent, rect, viewFactory = null); // So we should not check for this invalid parameter // (viewFactory == null) fail("Calling insertUpdate with null factory must result " + " in exception"); } catch (NullPointerException e) { } assertTrue(updateChildrenCalled); // The exception must have occurred in updateChildren assertFalse(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertFalse(updateLayoutCalled); } /** * <code>updateChildren</code> returns <code>true</code>. * (Views represent entire elements.) */ public void testInsertUpdate07() throws BadLocationException { hasChildren = true; changeDocument(); doc.insertString(2, "^^^\n", null); assertEquals(0, docEvent.getChange(root).getIndex()); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 + 1, 1); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertTrue(updateLayoutCalled); } /** * <code>updateChildren</code> returns <code>false</code>. * (Views represent partial elements.) 
*/ public void testInsertUpdate08() throws BadLocationException { hasChildren = true; changeDocument(); doc.insertString(2, "^^^\n", null); assertEquals(0, docEvent.getChange(root).getIndex()); updateChildrenReturn = false; assertEquals(4, view.getViewCount()); view.insertUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 + 1, 1); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(2, viewsForwardedTo.size()); for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i), viewsForwardedTo.get(i)); } assertTrue(updateLayoutCalled); } /** * In this test view representing <code>line</code> is replaced with two * view which represent parts of the <code>line</code> Element. * <p> * <code>updateChildren</code> returns <code>true</code>, i.e. it is * considered a view represents an entire element. */ public void testInsertUpdate09() throws BadLocationException { createPartialViews(); updateChildrenReturn = true; view.insertUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(BasicSwingTestCase.isHarmony() ? 2 : 1, viewsForwardedTo.size()); for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); assertTrue("@ " + i, view.getView(i + 1) instanceof ElementPartView); } assertTrue(updateLayoutCalled); } /** * In this test view representing <code>line</code> is replaced with two * view which represent parts of the <code>line</code> Element. * (Same as in <code>testInsertUpdate09</code> except for the below). * <p> * <code>updateChildren</code> returns <code>false</code>, i.e. it is * considered a view may represent a portion of element. 
*/ public void testInsertUpdate10() throws BadLocationException { createPartialViews(); updateChildrenReturn = false; view.insertUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(BasicSwingTestCase.isHarmony() ? 2 : 1, viewsForwardedTo.size()); for (int i = 0; i < viewsForwardedTo.size(); i++) { assertSame("@ " + i, view.getView(i + 1), viewsForwardedTo.get(i)); assertTrue("@ " + i, view.getView(i + 1) instanceof ElementPartView); } assertTrue(updateLayoutCalled); } private void createPartialViews() throws BadLocationException { hasChildren = true; changeDocument(); final int offset = (line.getStartOffset() + line.getEndOffset()) / 2; doc.insertString(offset, "^^^^", null); View[] parts = new View[2]; parts[0] = new ElementPartView(line, line.getStartOffset(), offset + 2); parts[1] = new ElementPartView(line, offset + 2, line.getEndOffset()); view.replace(1, 1, parts); } /** * The view has <i>no</i> children, and <code>updateChildren</code> * is <i>not</i> called as well as other methods involved. */ public void testRemoveUpdate01() throws BadLocationException { changeDocument(); doc.remove(line.getStartOffset(), 9); hasChildren = false; assertEquals(0, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertFalse(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertFalse(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>false</code>. * <p> * Exactly one element is removed. 
*/ public void testRemoveUpdate02() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(line.getStartOffset(), 9); updateChildrenReturn = false; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(2, viewsForwardedTo.size()); assertSame(view.getView(0), viewsForwardedTo.get(0)); assertSame(view.getView(1), viewsForwardedTo.get(1)); assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>true</code>. * <p> * Exactly one element is removed. */ public void testRemoveUpdate03() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(line.getStartOffset(), 9); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(1, viewsForwardedTo.size()); assertSame(view.getView(0), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>true</code>. * <p> * Text removed is within one element. */ public void testRemoveUpdate04() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(line.getStartOffset() + 1, 2); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertFalse(updateChildrenCalled); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(1, viewsForwardedTo.size()); assertSame(view.getView(1), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>true</code>. * <p> * New line character is removed. 
*/ public void testRemoveUpdate05() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(line.getEndOffset() - 1, 1); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); // assertEquals(1, viewsForwardedTo.size()); // assertEquals(view.getView(1), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * The view has children and <code>updateChildren</code> returns * <code>false</code>. * <p> * New line character is removed. */ public void testRemoveUpdate06() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(line.getEndOffset() - 1, 1); updateChildrenReturn = false; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(1, viewsForwardedTo.size()); assertSame(view.getView(1), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * This test-method is similar to testRemoveUpdate02, but the text removed * is in the first paragraph. * <p> * Exactly one element is removed. */ public void testRemoveUpdate07() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(0, line.getStartOffset()); updateChildrenReturn = false; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertTrue(forwardUpdateToViewCalled); assertEquals(1, viewsForwardedTo.size()); assertSame(view.getView(0), viewsForwardedTo.get(0)); assertTrue(updateLayoutCalled); } /** * This test-method is similar to testRemoveUpdate03, but the text removed * is in the first paragraph. 
* <p> * Exactly one element is removed. */ public void testRemoveUpdate08() throws BadLocationException { hasChildren = true; changeDocument(); doc.remove(0, line.getStartOffset()); updateChildrenReturn = true; assertEquals(4, view.getViewCount()); view.removeUpdate(docEvent, rect, viewFactory); assertTrue(updateChildrenCalled); checkUpdatedChildren(4 - 1, 2); assertTrue(forwardUpdateCalled); assertFalse(forwardUpdateToViewCalled); assertTrue(updateLayoutCalled); } /** * Tests <code>updateLayout</code> when element change is not * <code>null</code>. */ public void testUpdateLayout01() throws BadLocationException { final class Params { View child; boolean height; boolean width; } final Params params = new Params(); view = new DisAbstractedView(line) { @Override public void preferenceChanged(final View child, final boolean width, final boolean height) { params.child = child; params.width = width; params.height = height; } }; // Insert string to fill docEvent doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); view.updateLayout(docEvent.getChange(root), docEvent, rect); assertNull(params.child); assertTrue(params.width); assertTrue(params.height); } /** * Tests <code>updateLayout</code> when element change is * <code>null</code>: seems like it has no side effects and * probably does nothing in this case. */ public void testUpdateLayout02() throws BadLocationException { final boolean[] called = new boolean[1]; view = new DisAbstractedView(line) { @Override public void preferenceChanged(final View child, final boolean width, final boolean height) { called[0] = true; } }; // Insert string to fill docEvent doc.insertString(line.getStartOffset() + 2, "one\ntwo\n", null); view.updateLayout(null, docEvent, rect); assertFalse(called[0]); } /** * Sets up the test fixture for changes tests. 
*/ @Override protected void setUp() throws Exception { super.setUp(); doc = new PlainDocument(); doc.insertString(0, "01234\nabcde", null); root = doc.getDefaultRootElement(); line = root.getElement(1); viewFactory = new ViewFactory() { public View create(final Element element) { viewsCreatedElements.add(element); return new ChildView(element); } }; // We create anonymous subclass of View where we override // update methods to assert parameters passed view = new ChangeView(root); // Document listener to catch events on insert and remove // (so that they are real but not synthetic). But for event of // type <code>CHANGE</code> we create it ourselves. DocumentListener listener = new DocumentListener() { public void changedUpdate(final DocumentEvent event) { docEvent = event; } public void insertUpdate(final DocumentEvent event) { docEvent = event; } public void removeUpdate(final DocumentEvent event) { docEvent = event; } }; doc.addDocumentListener(listener); CompositeView_ModelViewTest.shape = rect; } private void changeDocument() throws BadLocationException { doc.insertString(doc.getLength(), "one\ntwo\n", null); line = root.getElement(1); view.removeAll(); ((CompositeView) view).loadChildren(viewFactory); viewsCreatedElements.clear(); replaceViews = null; } /** * Checks that child views were updated as expected. 
* * @param count the new number of children * @param length the number of child views removed */ private void checkUpdatedChildren(final int count, final int length) { Element[] added = docEvent.getChange(root).getChildrenAdded(); assertEquals("added and created are different", added.length, viewsCreatedElements .size()); for (int i = 0; i < added.length; i++) { assertSame("Elements different @ " + i, added[i], viewsCreatedElements.get(i)); } assertEquals("Child view count is unexpected", count, view.getViewCount()); assertEquals("Replace index is unexpected", docEvent.getChange(root).getIndex(), replaceIndex); assertEquals("Replace length is unexpected", length, replaceLength); assertEquals("Replace views.length is unexpected", added.length, replaceViews.length); } /*public void testUpdateChildren() { // tested in testInsertUpdate etc. } public void testForwardUpdate() { // tested in testInsertUpdate05 }*/ }
oracle/fastr
35,375
com.oracle.truffle.r.test/src/com/oracle/truffle/r/test/builtins/TestBuiltin_asvector.java
/* * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., 51 * Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * Copyright (c) 2012-2014, Purdue University * Copyright (c) 2013, 2018, Oracle and/or its affiliates * * All rights reserved. */ package com.oracle.truffle.r.test.builtins; import org.junit.Test; import com.oracle.truffle.r.test.TestRBase; // Checkstyle: stop line length check public class TestBuiltin_asvector extends TestRBase { @Override protected String getTestDir() { return "asvector"; } @Test public void testasvector1() { assertEval("argv <- list('ylog', 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector2() { assertEval("argv <- list(structure(character(0), package = character(0), class = structure('ObjectsWithPackage', package = 'methods')), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector3() { assertEval("argv <- list(quote(list(ya, x[rep.int(NA_integer_, nyy), nm.x, drop = FALSE])), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector4() { assertEval("argv <- list(NA_character_, 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector5() { assertEval("argv <- list(structure(NA_integer_, .Label = c('Australia', 'UK', 'US'), class = 'factor'), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void 
testasvector6() { assertEval("argv <- list(structure(list(a1 = 1:3, a2 = 4:6, a3 = 3.14159265358979, a4 = c('a', 'b', 'c')), .Names = c('a1', 'a2', 'a3', 'a4')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector7() { assertEval("argv <- list(quote(list(ii = 1:10, xx = pi * -3:6)), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector8() { assertEval("argv <- list(c(-1L, -2L), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector9() { assertEval("argv <- list(quote(list(x = 1:100, z = 1:100 + rnorm(100, 10))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector10() { assertEval("argv <- list(structure(c(FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, 
FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE), .Names = c('#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 
'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render', '#ifdef', '\\\\Sexpr', 'build', 'install', 'render')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector11() { assertEval("argv <- list(structure(c(0.00290239468554411, 0.00140705152597282, 0.00182415100508824, 0.000171517300342798, 0.0747454613066297, 0.00103234723292905, 0.000179983318697139, 0.035258608446556, 0.00336847595628205, 0.0640696486471418, 0.0132108002751951, 0.00194778778741288, 0.00351950115137134, 0.00070046832029645, 0.00252844357734999, 0.014372012195495, 0.00923422554274329, 7.64817786749709e-06, 0.00387339857745524, 0.00121246491006704, 0.00624917129689857, 0.00187753034805145, 0.000103002251547081, 0.0136703020254034, 0.000349542811339765, 
0.00120367047056318, 0.00194205014408536, 0.00462815827742801, 0.000149291834133955, 0.00193441236645678, 9.00084520363788e-05, 0.0160915134527436, 0.00346675958538611, 0.00481936427422656, 3.13343033856204e-05, 0.0564685345533007, 0.00929771993193244, 0.0103876340982415, 0.0133005891226512, 0.0325989357511189, 0.00228122925969392, 0.0460976655088242, 0.00300363745967821, 0.000271060875811077, 0.0301696315261026, 4.72002631048228e-05, 0.0262321004865233, 0.00594174673473013, 0.00288915040856096, 0.00635277836091399, 0.00569342819072193, 0.0163907345734163, 0.000360581939026215, 0.00023772587191537, 0.0164062036225435, 0.0238391417439454, NaN, 0.0421542087325977, 0.00133954856768466, 0.0113421570571088, 0.0081824228772913, 0.000149291834133955, 0.00162069399881579, 0.0018026229128858, 0.0043164627226381, 0.000407784303899559, 0.00876301280354452, 0.00179253664026376, 0.000416739394150718, 0.014372012195495, 0.000179983318697139, 0.00115986529332945, 0.00377736311314377, 0.00219491136307178, 0.00070046832029645, 0.000522557531637993, 9.86336244510646e-05, 0.0216346027446621, 0.000659639144027202, 0.0137501462695058, 5.91425796335962e-08, 0.0279425064631674, 0.000170828237014775, 0.0042454690355613, 0.0114879015536739, 0.000173346990819198, 0.00138111062254461, 0.00772582941114727, 0.0277947034678616, 0.00892024547056825, 0.0618577709874562, 0.0125790610228498, 0.0277947034678616), .Names = c('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93')), 'any'); 
.Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector12() { assertEval("argv <- list(structure(c(0.164593338447767, 0.182090654313858, NA, 0.484947927602608), .Names = c('(Intercept)', 'x1', 'x2', 'x3')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector13() { assertEval("argv <- list('', 'double'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector14() { assertEval("argv <- list(structure(c('myTst', 'Package', 'What the package does (short line)', '1.0', '2014-03-17', 'Who wrote it', 'Who to complain to <yourfault@somewhere.net>', 'More about what it does (maybe more than one line)', 'What license is it under?', 'methods'), .Names = c('Package', 'Type', 'Title', 'Version', 'Date', 'Author', 'Maintainer', 'Description', 'License', 'Depends')), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector15() { // FIXME // Expected output: expression(sqrt(abs(`Standardized residuals`))) // FastR output: expression(sqrt, abs(`Standardized residuals`)) assertEval(Ignored.ImplementationError, "argv <- list(quote(sqrt(abs(`Standardized residuals`))), 'expression'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector16() { assertEval("argv <- list(1, 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector17() { assertEval("argv <- list(quote(list(X[[2L]])), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector18() { assertEval("argv <- list(NA, 'logical'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector19() { // docs do not explicitly handle this so it might be ReferenceError too // Expected output: numeric(0) // FastR output: NULL assertEval(Ignored.ImplementationError, "argv <- list(NULL, 'double'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector20() { assertEval("argv <- list(quote(list(x = c(1, 1, 1, 
1, 1, 2, 2, 3, 3, 3, 3, 3, 3, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 11, 12), y = c(73, 73, 70, 74, 75, 115, 105, 107, 124, 107, 116, 125, 102, 144, 178, 149, 177, 124, 157, 128, 169, 165, 186, 152, 181, 139, 173, 151, 138, 181, 152, 188, 173, 196, 180, 171, 188, 174, 198, 172, 176, 162, 188, 182, 182, 141, 191, 190, 159, 170, 163, 197), weight = c(1, rep(0.1, 51)))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector21() { // docs do not explicitly handle this so it might be ReferenceError too // Expected output: numeric(0) // FastR output: NULL assertEval(Ignored.ImplementationError, "argv <- list(NULL, 'integer'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector22() { assertEval("argv <- list(quote(list(ff <- factor(c(1:2, NA, 2), exclude = NULL))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector23() { assertEval("argv <- list(c(-1, 3, 1, 1, 5, 1), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector24() { assertEval("argv <- list(quote(list(y, x1, x2)), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector25() { assertEval("argv <- list(structure(c(0.005, 50, 550), .Names = c('k', 'g1', 'g2')), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector26() { assertEval("argv <- list(quote(list(V1 = c('a', 'd e', 'h'), V2 = c('b\\'', 'f', 'i'), V3 = c('c', 'g', 'j\\nk l m'))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector27() { assertEval("argv <- list(NA, 'integer'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector28() { assertEval("argv <- list(c(NA, NaN), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector29() { assertEval("argv <- list(c(NA, NaN), 'integer'); 
.Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector30() { assertEval("argv <- list(list('a', 'b', 'c'), 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector31() { assertEval("argv <- list(structure(1:12, .Dim = 3:4, .Dimnames = list(c('A', 'B', 'C'), c('D', 'E', 'F', 'G'))), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector32() { assertEval("argv <- list(quote(list(x = c(2:3, NA), y = c(3:4, NA))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector33() { assertEval("argv <- list(quote(list(cut(Dtimes, '3 months'))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector34() { assertEval("argv <- list(quote(list(a = I('abc'), b = I('def\\\'gh'))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector35() { assertEval("argv <- list(structure(list(a = 1), .Names = 'a'), 'double'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector36() { assertEval("argv <- list(structure(c(0, 0.0123079727211562, 0.00970882237374837, 0.62883302403078, 0.689843718945119, 0.689843718944881, 0.672453157851573, 0.534493702379921, 0.171039529097608, 0.17103952909345, 0.50219835346871, 0.530975095958163, 0.0050966004562048, 0.0106639382954144, 0.811192712625201, 0.0957932531337699), .Names = c('(Intercept)', 'M.userY', 'TempHigh', 'M.userY:TempHigh', 'SoftMedium', 'SoftSoft', 'M.userY:SoftMedium', 'M.userY:SoftSoft', 'TempHigh:SoftMedium', 'TempHigh:SoftSoft', 'M.userY:TempHigh:SoftMedium', 'M.userY:TempHigh:SoftSoft', 'BrandM', 'M.userY:BrandM', 'TempHigh:BrandM', 'M.userY:TempHigh:BrandM')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector37() { assertEval("argv <- list(c(-2, -1.95959595959596, -1.91919191919192, -1.87878787878788, -1.83838383838384, -1.7979797979798, -1.75757575757576, -1.71717171717172, 
-1.67676767676768, -1.63636363636364, -1.5959595959596, -1.55555555555556, -1.51515151515152, -1.47474747474747, -1.43434343434343, -1.39393939393939, -1.35353535353535, -1.31313131313131, -1.27272727272727, -1.23232323232323, -1.19191919191919, -1.15151515151515, -1.11111111111111, -1.07070707070707, -1.03030303030303, -0.98989898989899, -0.949494949494949, -0.909090909090909, -0.868686868686869, -0.828282828282828, -0.787878787878788, -0.747474747474747, -0.707070707070707, -0.666666666666667, -0.626262626262626, -0.585858585858586, -0.545454545454545, -0.505050505050505, -0.464646464646465, -0.424242424242424, -0.383838383838384, -0.343434343434343, -0.303030303030303, -0.262626262626263, -0.222222222222222, -0.181818181818182, -0.141414141414141, -0.101010101010101, -0.0606060606060606, -0.0202020202020201, 0.0202020202020203, 0.060606060606061, 0.101010101010101, 0.141414141414141, 0.181818181818182, 0.222222222222222, 0.262626262626263, 0.303030303030303, 0.343434343434343, 0.383838383838384, 0.424242424242424, 0.464646464646465, 0.505050505050505, 0.545454545454546, 0.585858585858586, 0.626262626262626, 0.666666666666667, 0.707070707070707, 0.747474747474748, 0.787878787878788, 0.828282828282829, 0.868686868686869, 0.909090909090909, 0.94949494949495, 0.98989898989899, 1.03030303030303, 1.07070707070707, 1.11111111111111, 1.15151515151515, 1.19191919191919, 1.23232323232323, 1.27272727272727, 1.31313131313131, 1.35353535353535, 1.39393939393939, 1.43434343434343, 1.47474747474747, 1.51515151515152, 1.55555555555556, 1.5959595959596, 1.63636363636364, 1.67676767676768, 1.71717171717172, 1.75757575757576, 1.7979797979798, 1.83838383838384, 1.87878787878788, 1.91919191919192, 1.95959595959596, 2), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector38() { assertEval("argv <- list(integer(0), 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector39() { assertEval("argv <- 
list(structure('lightblue', .Names = 'bg'), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector40() { assertEval("argv <- list(c(NA, NaN), 'logical'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector41() { // FIXME // Expected output: [1] character(0) // <0 rows> (or 0-length row.names) // FastR output: $`character(0)` // factor(0) // Levels: assertEval("argv <- list(structure(list(`character(0)` = structure(integer(0), .Label = character(0), class = 'factor')), .Names = 'character(0)', row.names = character(0), class = 'data.frame'), 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector42() { assertEval("argv <- list(NA, 'double'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector43() { assertEval("argv <- list(list('GRID.VP.12'), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector44() { // RInternalError: not implemented: NULL to logical assertEval(Ignored.ImplementationError, "argv <- list(NULL, 'logical'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector45() { assertEval("argv <- list(structure(1:20, .Tsp = c(1, 20, 1), class = 'ts'), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector47() { assertEval("argv <- list(structure(c(0.1, 0.8, 1, 0.5, 0.8, 1, 0, 0.5, 1), .Dim = c(3L, 3L), .Dimnames = list(c('(3.59,4.5]', '(4.5,5.4]', '(5.4,6.31]'), c('ctrl', 'trt1', 'trt2'))), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector48() { assertEval("argv <- list(integer(0), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector49() { assertEval("argv <- list(structure(c(1L, 1L), .Label = 'registered S3method for $', class = 'factor'), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector50() { assertEval("argv <- list('1.3', 'double'); 
.Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector51() { assertEval("argv <- list(c(8L, 11L, 14L, 16L, 19L, 4L, 6L, 9L, 15L, NA, 7L, 10L, 20L), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector52() { assertEval("argv <- list(structure(c(5.4278733372119e-07, 0.000257866433233453, NA), .Names = c('x', 'm', 'Residuals')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector53() { assertEval("argv <- list('1.3', 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector54() { assertEval("argv <- list(1L, 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector55() { assertEval("argv <- list(NULL, 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector56() { assertEval("argv <- list(quote(list(expand.grid(Hair = lab$Hair, Eye = lab$Eye, Sex = lab$Sex, stringsAsFactors = TRUE), Fr = as.vector(HairEyeColor))), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector57() { assertEval("argv <- list(FALSE, 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector59() { assertEval("argv <- list(structure(list(`character(0)` = structure(integer(0), .Label = character(0), class = 'factor')), .Names = 'character(0)', row.names = character(0), class = 'data.frame'), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector60() { assertEval("argv <- list(1L, 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector61() { assertEval("argv <- list(c('The C and R code has been reformatted for legibility.', 'The old compatibility function rpconvert() has been removed.', 'The cross-validation functions allow for user interrupt at the end\\nof evaluating each split.', 'Variable Reliability in data set car90 is corrected to be an\\nordered factor, as documented.', 
'Surrogate splits are now considered only if they send two or more\\ncases _with non-zero weight_ each way. For numeric/ordinal\\nvariables the restriction to non-zero weights is new: for\\ncategorical variables this is a new restriction.', 'Surrogate splits which improve only by rounding error over the\\ndefault split are no longer returned. Where weights and missing\\nvalues are present, the splits component for some of these was not\\nreturned correctly.', 'A fit of class \\\'rpart\\\' now contains a component for variable\\n‘importance’, which is reported by the summary() method.', 'The text() method gains a minlength argument, like the labels()\\nmethod. This adds finer control: the default remains pretty =\\nNULL, minlength = 1L.', 'The handling of fits with zero and fractional weights has been\\ncorrected: the results may be slightly different (or even\\nsubstantially different when the proportion of zero weights is\\nlarge).', 'Some memory leaks have been plugged.', 'There is a second vignette, longintro.Rnw, a version of the\\noriginal Mayo Tecnical Report on rpart.', 'Added dataset car90, a corrected version of the S-PLUS dataset\\ncar.all (used with permission).', 'This version does not use paste0{} and so works with R 2.14.x.', 'Merged in a set of Splus code changes that had accumulated at Mayo\\nover the course of a decade. The primary one is a change in how\\nindexing is done in the underlying C code, which leads to a major\\nspeed increase for large data sets. Essentially, for the lower\\nleaves all our time used to be eaten up by bookkeeping, and this\\nwas replaced by a different approach. The primary routine also\\nuses .Call{} so as to be more memory efficient.', 'The other major change was an error for asymmetric loss matrices,\\nprompted by a user query. 
With L=loss asymmetric, the altered\\npriors were computed incorrectly - they were using L\\' instead of L.\\nUpshot - the tree would not not necessarily choose optimal splits\\nfor the given loss matrix. Once chosen, splits were evaluated\\ncorrectly. The printed “improvement” values are of course the\\nwrong ones as well. It is interesting that for my little test\\ncase, with L quite asymmetric, the early splits in the tree are\\nunchanged - a good split still looks good.', 'Add the return.all argument to xpred.rpart().', 'Added a set of formal tests, i.e., cases with known answers to\\nwhich we can compare.', 'Add a usercode vignette, explaining how to add user defined\\nsplitting functions.', 'The class method now also returns the node probability.', 'Add the stagec data set, used in some tests.', 'The plot.rpart routine needs to store a value that will be visible\\nto the rpartco routine at a later time. This is now done in an\\nenvironment in the namespace.', 'Force use of registered symbols in R >= 2.16.0', 'Update Polish translations.', 'Work on message formats.', 'Add Polish translations', 'rpart, rpart.matrix: allow backticks in formulae.', 'tests/backtick.R: regession test', 'src/xval.c: ensure unused code is not compiled in.', 'Change description of margin in ?plot.rpart as suggested by Bill\\nVenables.'), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector63() { assertEval("argv <- list(2, 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector64() { assertEval("argv <- list(structure(list(c0 = structure(integer(0), .Label = character(0), class = 'factor')), .Names = 'c0', row.names = character(0), class = structure('integer(0)', .Names = 'c0')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector66() { assertEval("argv <- list(3.18309886183776e-301, 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector67() { 
assertEval("argv <- list(quote(list(a = 1:3, b = letters[1:3])), 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector68() { assertEval("argv <- list(NA, 'list'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector69() { assertEval("argv <- list(c(200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 1e+05, 2e+05, 5e+05), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector70() { assertEval("argv <- list(structure(c(-0.560475646552213+0i, 0.7424437487+0.205661411508856i, 1.39139505579429-0.26763356813179i, 0.928710764113827-0.221714979045717i, -0.46926798541295+1.18846175213664i, 0.7424437487-0.205661411508856i, 0.460916205989202+0i, -0.452623703774585+0.170604003753717i, -0.094501186832143+0.54302538277632i, -0.331818442379127+0.612232958468282i, 1.39139505579429+0.26763356813179i, -0.452623703774585-0.170604003753717i, 0.400771450594052+0i, -0.927967220342259+0.479716843914174i, -0.790922791530657+0.043092176305418i, 0.928710764113827+0.221714979045717i, -0.094501186832143-0.54302538277632i, -0.927967220342259-0.479716843914174i, 0.701355901563686+0i, -0.600841318509537+0.213998439984336i, -0.46926798541295-1.18846175213664i, -0.331818442379127-0.612232958468282i, -0.790922791530657-0.043092176305418i, -0.600841318509537-0.213998439984336i, -0.625039267849257+0i), .Dim = c(5L, 5L)), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector71() { assertEval("argv <- list(structure(c(2.2250738585072e-308, 1.79769313486232e+308), .Names = c('double.xmin', 'double.xmax')), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector72() { assertEval("argv <- list(structure(1.6, class = 'object_size'), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))"); } @Test public void testasvector73() { assertEval("argv <- list(structure(NA_integer_, .Label = c('no', 'yes'), class = 'factor'), 'any'); 
.Internal(as.vector(argv[[1]], argv[[2]]))"); }

    // Generated regression tests: each snippet replays argument lists captured from
    // real R sessions against the .Internal(as.vector(...)) builtin and compares the
    // output with GNU-R's. The R code lives entirely inside the string literals, so
    // the literals must stay byte-for-byte exact.

    @Test
    public void testasvector74() {
        // logical -> character coercion
        assertEval("argv <- list(FALSE, 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    @Test
    public void testasvector75() {
        // double -> pairlist coercion
        assertEval("argv <- list(3.14159265358979, 'pairlist'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    @Test
    public void testasvector76() {
        // zero-row data.frame with an AsIs column -> character
        assertEval("argv <- list(structure(list(c0 = structure(character(0), class = 'AsIs')), .Names = 'c0', row.names = character(0), class = 'data.frame'), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    @Test
    public void testasvector77() {
        // zero-length list with a 0 dim attribute, mode 'any'
        assertEval("argv <- list(structure(list(), .Dim = 0L), 'any'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    @Test
    public void testasvector80() {
        // 'ts' object -> character (time-series attributes must be dropped)
        assertEval("argv <- list(structure('1', .Tsp = c(1, 1, 1), class = 'ts'), 'character'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    @Test
    public void testasvector81() {
        // character -> symbol coercion
        assertEval("argv <- list('diff', 'symbol'); .Internal(as.vector(argv[[1]], argv[[2]]))");
    }

    /**
     * Hand-written coverage for {@code as.vector}/{@code as.pairlist}: mode coercions
     * (including invalid modes), attribute/name dropping, NULL handling, S3 dispatch,
     * and pairlist name semantics.
     */
    @Test
    public void testAsVector() {
        // character source coerced to each target mode (several of these warn/error)
        assertEval("{ as.vector(\"foo\", \"integer\") }");
        assertEval("{ as.vector(\"foo\", \"double\") }");
        assertEval("{ as.vector(\"foo\", \"numeric\") }");
        assertEval("{ as.vector(\"foo\", \"logical\") }");
        assertEval("{ as.vector(\"foo\", \"raw\") }");
        assertEval("{ as.vector(\"foo\", \"character\") }");
        assertEval("{ as.vector(\"foo\", \"list\") }");
        assertEval("{ as.vector(\"foo\") }");
        // invalid mode name -> error
        assertEval("{ as.vector(\"foo\", \"bar\") }");
        assertEval("{ as.vector(c(\"foo\", \"bar\"), \"raw\") }");
        assertEval("x<-c(a=1.1, b=2.2); as.vector(x, \"raw\")");
        assertEval("x<-c(a=1L, b=2L); as.vector(x, \"complex\")");
        // as.vector drops attributes (including names) for atomic results...
        assertEval("{ x<-c(a=FALSE, b=TRUE); attr(x, \"foo\")<-\"foo\"; y<-as.vector(x); attributes(y) }");
        // ...but keeps names when the target mode is 'list'
        assertEval("{ x<-c(a=1, b=2); as.vector(x, \"list\") }");
        assertEval("{ x<-c(a=FALSE, b=TRUE); attr(x, \"foo\")<-\"foo\"; y<-as.vector(x, \"list\"); attributes(y) }");
        assertEval("{ x<-1:4; dim(x)<-c(2, 2); dimnames(x)<-list(c(\"a\", \"b\"), c(\"c\", \"d\")); y<-as.vector(x, \"list\"); y }");
        // NULL handling
        assertEval("{ as.vector(NULL, \"list\") }");
        assertEval("{ as.vector(NULL) }");
        assertEval("as.vector(as.symbol('asdf'), 'symbol')");
        // factors are converted via their labels
        assertEval("{ x<-factor(c(\"a\", \"b\", \"a\")); as.vector(x) }");
        assertEval("as.vector(x~z)");
        assertEval("as.vector(file(''))");
        // malformed 'mode' arguments -> error
        assertEval("{ as.vector(42, NULL) }");
        assertEval("{ as.vector(42, c(\"character\", \"character\")) }");
        assertEval("{ as.vector(42, character()) }");
        assertEval("as.vector(NULL, mode='pairlist')");
        // S3 dispatch on as.vector.<class>
        assertEval("{ as.vector.cls <- function(x, mode) 42; as.vector(structure(c(1,2), class='cls')); }");
        assertEval("as.pairlist(as.pairlist(c(1,2,3)))");
        assertEval("as.pairlist(mtcars[,1:3])");
        assertEval("as.pairlist(structure(1:3, myattr=42))");
        assertEval("as.vector(as.pairlist(structure(list(1,2,3), myattr=42)), 'list')");
        // as.pairlist drops empty names...
        assertEval("names(as.pairlist(structure(list(3), .Names = c(''))))");
        // ...but only if they are all empty...
        assertEval("names(as.pairlist(structure(list(3, 4), .Names = c('', 'abc'))))");
        // ...like in this example
        assertEval("names(as.pairlist(structure(list(3, 4), .Names = c('', ''))))");
    }

    /** Coverage for {@code as.symbol}: valid coercions plus error cases for unsupported types. */
    @Test
    public void testAsSymbol() {
        assertEval("{ as.symbol(\"name\") }");
        assertEval("{ as.symbol(123) }");
        assertEval("{ as.symbol(as.symbol(123)) }");
        // raw and complex cannot be coerced to a symbol -> error
        assertEval("{ as.symbol(as.raw(16)) }");
        assertEval("{ as.symbol(3+2i) }");
    }

    // Values that accept both names() and arbitrary attributes; crossed with 'modes'
    // in allCombinations() below.
    private final String[] valuesNameableAttributable = new String[]{
                    "list(1,2,4)",
                    "as.pairlist(c(1,2,3))",
                    "c(1L, 2L, 4L)",
                    "c(1, 2, 4)",
                    "as.raw(c(1, 2, 4))",
                    "c('1', '2', '4')",
                    "c(T, F, T)",
                    "c(1+i, 2+i, 4+i)",
                    "parse(text='x; y; z')",
                    // TODO: "parse(text='x+y')[[1]]", -- problem with UpdateNames and RPairList...
                    // TODO: "function() 42",
    };

    // Values that accept attributes but not names().
    private final String[] valuesAttributable = new String[]{
                    // TODO: "as.symbol('a')", -- attributes dropping/not dropping is not correct
                    "as.environment(list(a=3,b=4,x=5))",
    };

    // Currently unused; kept for future combination tests.
    @SuppressWarnings("unused") private final String[] otherValues = new String[]{
                    "NULL",
    };

    // Target modes exercised by allCombinations(); disabled entries document known gaps.
    private final String[] modes = new String[]{
                    "integer",
                    "numeric",
                    "double",
                    "raw",
                    "logical",
                    "complex",
                    "character",
                    "list",
                    "pairlist",
                    // TODO: "expression", -- too many differences in deparsing
                    "symbol",
                    "name",
                    // TODO: "closure",
                    // TODO: "function",
                    "any"
    };

    /**
     * Cartesian product of the value templates and target modes above; error messages
     * are ignored because only the success/failure shape is expected to match GNU-R.
     */
    @Test
    public void allCombinations() {
        assertEval(Output.IgnoreErrorMessage,
                        template("{ x <- %0; names(x) <- c('a','b','c'); attr(x,'myattr') <- 42; as.vector(x, mode='%1'); }", valuesNameableAttributable, modes));
        assertEval(Output.IgnoreErrorMessage,
                        template("{ x <- %0; attr(x,'myattr') <- 42; as.vector(x, mode='%1'); }", valuesAttributable, modes));
    }

    /** Types that cannot be coerced by as.vector at all -> error (context ignored). */
    @Test
    public void testCannotCoerce() {
        assertEval(Output.IgnoreErrorContext, "as.vector(environment())");
        assertEval(Output.IgnoreErrorContext, "as.vector(environment(), 'integer')");
        assertEval(Output.IgnoreErrorContext, "as.vector(function(){})");
    }
}
apache/druid
35,749
server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataManager.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.metadata;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Futures;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.druid.client.DataSourcesSnapshot;
import org.apache.druid.client.ImmutableDruidDataSource;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Stopwatch;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.segment.SchemaPayload;
import org.apache.druid.segment.SegmentMetadata;
import org.apache.druid.segment.metadata.CentralizedDatasourceSchemaConfig;
import org.apache.druid.segment.metadata.SegmentSchemaCache;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.skife.jdbi.v2.Batch;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Implementation of {@link SegmentsMetadataManager}, that periodically polls
 * used segments from the metadata store to build a {@link DataSourcesSnapshot}.
 */
public class SqlSegmentsMetadataManager implements SegmentsMetadataManager
{
  private static final EmittingLogger log = new EmittingLogger(SqlSegmentsMetadataManager.class);

  /**
   * Marker interface for objects stored in {@link #latestDatabasePoll}. See the comment for that field for details.
   */
  private interface DatabasePoll
  {}

  /** Represents periodic {@link #poll}s happening from {@link #exec}. */
  @VisibleForTesting
  static class PeriodicDatabasePoll implements DatabasePoll
  {
    /**
     * This future allows to wait until {@link #dataSourcesSnapshot} is initialized in the first {@link #poll()}
     * happening since {@link #startPollingDatabasePeriodically()} is called for the first time, or since the last
     * visible (in happens-before terms) call to {@link #startPollingDatabasePeriodically()} in case of Coordinator's
     * leadership changes.
     */
    final CompletableFuture<Void> firstPollCompletionFuture = new CompletableFuture<>();
    // -1 until the first poll of this periodic cycle actually starts.
    long lastPollStartTimestampInMs = -1;
  }

  /**
   * Represents on-demand {@link #poll} initiated at periods of time when SqlSegmentsMetadataManager doesn't poll the
   * database periodically.
   */
  @VisibleForTesting
  static class OnDemandDatabasePoll implements DatabasePoll
  {
    final long initiationTimeNanos = System.nanoTime();
    final CompletableFuture<Void> pollCompletionFuture = new CompletableFuture<>();

    long nanosElapsedFromInitiation()
    {
      return System.nanoTime() - initiationTimeNanos;
    }
  }

  /**
   * Use to synchronize {@link #startPollingDatabasePeriodically}, {@link #stopPollingDatabasePeriodically}, {@link
   * #poll}, and {@link #isPollingDatabasePeriodically}. These methods should be synchronized to prevent from being
   * called at the same time if two different threads are calling them. This might be possible if Coordinator gets and
   * drops leadership repeatedly in quick succession.
   *
   * This lock is also used to synchronize {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll} for times when
   * SqlSegmentsMetadataManager is not polling the database periodically (in other words, when the Coordinator is not
   * the leader).
   */
  private final ReentrantReadWriteLock startStopPollLock = new ReentrantReadWriteLock();

  /**
   * Used to ensure that {@link #poll()} is never run concurrently. It should already be so (at least in production
   * code), where {@link #poll()} is called only from the task created in {@link #createPollTaskForStartOrder} and is
   * scheduled in a single-threaded {@link #exec}, so this lock is an additional safety net in case there are bugs in
   * the code, and for tests, where {@link #poll()} is called from the outside code.
   *
   * Not using {@link #startStopPollLock}.writeLock() in order to still be able to run {@link #poll()} concurrently
   * with {@link #isPollingDatabasePeriodically()}.
   */
  private final Object pollLock = new Object();

  private final ObjectMapper jsonMapper;
  // Cached reader for DataSegment payloads; cheaper than calling jsonMapper.readValue per row.
  private final ObjectReader segmentReader;
  private final Duration periodicPollDelay;
  private final Supplier<MetadataStorageTablesConfig> dbTables;
  private final SQLMetadataConnector connector;
  private final SegmentSchemaCache segmentSchemaCache;
  private final ServiceEmitter serviceEmitter;
  private final CentralizedDatasourceSchemaConfig centralizedDatasourceSchemaConfig;

  /**
   * This field is made volatile to avoid "ghost secondary reads" that may result in NPE, see
   * https://github.com/code-review-checklists/java-concurrency#safe-local-dcl (note that dataSourcesSnapshot resembles
   * a lazily initialized field). Alternative is to always read the field in a snapshot local variable, but it's too
   * easy to forget to do.
   *
   * This field may be updated from {@link #exec}, or from whatever thread calling {@link #doOnDemandPoll} via {@link
   * #useLatestIfWithinDelayOrPerformNewDatabasePoll()} via one of the public methods of SqlSegmentsMetadataManager.
   */
  private volatile @MonotonicNonNull DataSourcesSnapshot dataSourcesSnapshot = null;

  /**
   * The latest {@link DatabasePoll} represent {@link #poll()} calls which update {@link #dataSourcesSnapshot}, either
   * periodically (see {@link PeriodicDatabasePoll}, {@link #startPollingDatabasePeriodically}, {@link
   * #stopPollingDatabasePeriodically}) or "on demand" (see {@link OnDemandDatabasePoll}), when one of the methods that
   * accesses {@link #dataSourcesSnapshot}'s state is
   * called when the Coordinator is not the leader and therefore SqlSegmentsMetadataManager isn't polling the database
   * periodically.
   *
   * Note that if there is a happens-before relationship between a call to {@link #startPollingDatabasePeriodically()}
   * (on Coordinators' leadership change) and one of the methods accessing the {@link #dataSourcesSnapshot}'s state in
   * this class the latter is guaranteed to await for the initiated periodic poll. This is because when the latter
   * method calls to {@link #useLatestSnapshotIfWithinDelay()} via {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll},
   * they will see the latest {@link PeriodicDatabasePoll} value (stored in this field, latestDatabasePoll, in {@link
   * #startPollingDatabasePeriodically()}) and to await on its {@link PeriodicDatabasePoll#firstPollCompletionFuture}.
   *
   * However, the guarantee explained above doesn't make any actual semantic difference, because on both periodic and
   * on-demand database polls the same invariant is maintained that the results not older than {@link
   * #periodicPollDelay} are used. The main difference is in performance: since on-demand polls are irregular and happen
   * in the context of the thread wanting to access the {@link #dataSourcesSnapshot}, that may cause delays in the
   * logic. On the other hand, periodic polls are decoupled into {@link #exec} and {@link
   * #dataSourcesSnapshot}-accessing methods should be generally "wait free" for database polls.
   *
   * The notion and the complexity of "on demand" database polls was introduced to simplify the interface of {@link
   * SegmentsMetadataManager} and guarantee that it always returns consistent and relatively up-to-date data,
   * while avoiding excessive repetitive polls. The last part
   * is achieved via "hooking on" other polls by awaiting on {@link PeriodicDatabasePoll#firstPollCompletionFuture} or
   * {@link OnDemandDatabasePoll#pollCompletionFuture}, see {@link #useLatestIfWithinDelayOrPerformNewDatabasePoll}
   * method implementation for details.
   *
   * Note: the overall implementation of periodic/on-demand polls is not completely optimal: for example, when the
   * Coordinator just stopped leading, the latest periodic {@link #poll} (which is still "fresh") is not considered
   * and a new on-demand poll is always initiated. This is done to simplify the implementation, while the efficiency
   * during Coordinator leadership switches is not a priority.
   *
   * This field is {@code volatile} because it's checked and updated in a double-checked locking manner in {@link
   * #useLatestIfWithinDelayOrPerformNewDatabasePoll()}.
   */
  private volatile @Nullable DatabasePoll latestDatabasePoll = null;

  /** Used to cancel periodic poll task in {@link #stopPollingDatabasePeriodically}. */
  @GuardedBy("startStopPollLock")
  private @Nullable Future<?> periodicPollTaskFuture = null;

  /** The number of times {@link #startPollingDatabasePeriodically} was called. */
  @GuardedBy("startStopPollLock")
  private long startPollingCount = 0;

  /**
   * Equal to the current {@link #startPollingCount} value if the SqlSegmentsMetadataManager is currently started; -1 if
   * currently stopped.
   *
   * This field is used to implement a simple stamp mechanism instead of just a boolean "started" flag to prevent
   * the theoretical situation of two or more tasks scheduled in {@link #startPollingDatabasePeriodically()} calling
   * {@link #isPollingDatabasePeriodically()} and {@link #poll()} concurrently, if the sequence of {@link
   * #startPollingDatabasePeriodically()} - {@link #stopPollingDatabasePeriodically()} - {@link
   * #startPollingDatabasePeriodically()} actions occurs quickly.
   *
   * {@link SQLMetadataRuleManager} also has a similar issue.
   */
  @GuardedBy("startStopPollLock")
  private long currentStartPollingOrder = -1;

  @GuardedBy("startStopPollLock")
  private @Nullable ScheduledExecutorService exec = null;

  // NOTE(review): not @GuardedBy and not volatile; read/written from multiple public methods.
  // Also null until populateUsedFlagLastUpdatedAsync() is first called — see the NOTE on
  // stopAsyncUsedFlagLastUpdatedUpdate() below.
  private Future<?> usedFlagLastUpdatedPopulationFuture;

  public SqlSegmentsMetadataManager(
      ObjectMapper jsonMapper,
      Supplier<SegmentsMetadataManagerConfig> config,
      Supplier<MetadataStorageTablesConfig> dbTables,
      SQLMetadataConnector connector,
      SegmentSchemaCache segmentSchemaCache,
      CentralizedDatasourceSchemaConfig centralizedDatasourceSchemaConfig,
      ServiceEmitter serviceEmitter
  )
  {
    this.jsonMapper = jsonMapper;
    this.segmentReader = jsonMapper.readerFor(DataSegment.class);
    this.periodicPollDelay = config.get().getPollDuration().toStandardDuration();
    this.dbTables = dbTables;
    this.connector = connector;
    this.segmentSchemaCache = segmentSchemaCache;
    this.centralizedDatasourceSchemaConfig = centralizedDatasourceSchemaConfig;
    this.serviceEmitter = serviceEmitter;
  }

  /** Creates the single-threaded scheduler used for periodic polls. Idempotent. */
  @Override
  public void start()
  {
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      if (exec != null) {
        return; // Already started
      }
      exec = Execs.scheduledSingleThreaded(StringUtils.encodeForFormat(getClass().getName()) + "-Exec--%d");
    }
    finally {
      lock.unlock();
    }
  }

  /**
   * Shuts down the poll scheduler.
   * NOTE(review): throws NPE if called before {@link #start()} (exec is null) — confirm callers
   * always pair stop() with a prior start().
   */
  @Override
  public void stop()
  {
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      exec.shutdownNow();
      exec = null;
    }
    finally {
      lock.unlock();
    }
  }

  /**
   * Begins the periodic poll cycle. Requires {@link #start()} to have been called. Idempotent
   * while already polling. Bumps {@link #startPollingCount} so stale scheduled tasks from a
   * previous cycle recognize themselves via the start-order stamp and become no-ops.
   */
  @Override
  public void startPollingDatabasePeriodically()
  {
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      if (exec == null) {
        throw new IllegalStateException(getClass().getName() + " is not started");
      }
      if (isPollingDatabasePeriodically()) {
        return;
      }

      PeriodicDatabasePoll periodicDatabasePoll = new PeriodicDatabasePoll();
      latestDatabasePoll = periodicDatabasePoll;

      startPollingCount++;
      currentStartPollingOrder = startPollingCount;
      final long localStartOrder = currentStartPollingOrder;

      periodicPollTaskFuture = exec.scheduleWithFixedDelay(
          createPollTaskForStartOrder(localStartOrder, periodicDatabasePoll),
          0,
          periodicPollDelay.getMillis(),
          TimeUnit.MILLISECONDS
      );
    }
    finally {
      lock.unlock();
    }
  }

  /**
   * Cancels the async used_status_last_updated backfill, if running.
   * NOTE(review): NPE if {@link #populateUsedFlagLastUpdatedAsync()} was never called
   * (usedFlagLastUpdatedPopulationFuture is null) — confirm call ordering.
   */
  @Override
  public void stopAsyncUsedFlagLastUpdatedUpdate()
  {
    if (!usedFlagLastUpdatedPopulationFuture.isDone() && !usedFlagLastUpdatedPopulationFuture.isCancelled()) {
      usedFlagLastUpdatedPopulationFuture.cancel(true);
    }
  }

  /**
   * Kicks off {@link #populateUsedFlagLastUpdated()} on a background thread.
   * NOTE(review): creates a fresh single-thread executor on every call and never shuts it
   * down — the worker thread leaks after the task finishes. Consider a shared executor or
   * executorService.shutdown() after submit.
   */
  @Override
  public void populateUsedFlagLastUpdatedAsync()
  {
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    usedFlagLastUpdatedPopulationFuture = executorService.submit(
        this::populateUsedFlagLastUpdated
    );
  }

  /**
   * Populate used_status_last_updated for unused segments whose current value for said column is NULL
   *
   * The updates are made incrementally.
   */
  @VisibleForTesting
  void populateUsedFlagLastUpdated()
  {
    final String segmentsTable = getSegmentsTable();
    log.info(
        "Populating column 'used_status_last_updated' with non-NULL values for unused segments in table[%s].",
        segmentsTable
    );

    final int batchSize = 100;
    int totalUpdatedEntries = 0;

    // Update the rows in batches of size 100
    while (true) {
      final List<String> segmentsToUpdate = new ArrayList<>(batchSize);
      int numUpdatedRows;
      try {
        // Fetch the next batch of ids with a NULL used_status_last_updated.
        connector.retryWithHandle(
            handle -> {
              segmentsToUpdate.addAll(handle.createQuery(
                  StringUtils.format(
                      "SELECT id FROM %1$s WHERE used_status_last_updated IS NULL and used = :used %2$s",
                      segmentsTable,
                      connector.limitClause(batchSize)
                  )
              ).bind("used", false).mapTo(String.class).list());
              return null;
            }
        );

        if (segmentsToUpdate.isEmpty()) {
          break;
        }

        // Stamp each fetched row with "now" in a single JDBI batch.
        numUpdatedRows = connector.retryWithHandle(
            handle -> {
              final Batch updateBatch = handle.createBatch();
              final String sql = "UPDATE %1$s SET used_status_last_updated = '%2$s' WHERE id = '%3$s'";
              String now = DateTimes.nowUtc().toString();
              for (String id : segmentsToUpdate) {
                updateBatch.add(StringUtils.format(sql, segmentsTable, now, id));
              }
              int[] results = updateBatch.execute();
              return Arrays.stream(results).sum();
            }
        );
        totalUpdatedEntries += numUpdatedRows;
      }
      catch (Exception e) {
        log.warn(e, "Populating column 'used_status_last_updated' in table[%s] has failed. There may be unused segments with"
                    + " NULL values for 'used_status_last_updated' that won't be killed!", segmentsTable);
        return;
      }

      log.debug(
          "Updated a batch of [%d] rows in table[%s] with a valid used_status_last_updated date",
          segmentsToUpdate.size(), segmentsTable
      );

      // Do not wait if there are no more segments to update
      if (segmentsToUpdate.size() == numUpdatedRows && numUpdatedRows < batchSize) {
        break;
      }

      // Wait for some time before processing the next batch
      try {
        Thread.sleep(10000);
      }
      catch (InterruptedException e) {
        log.info("Interrupted, exiting!");
        Thread.currentThread().interrupt();
      }
    }
    log.info(
        "Populated column 'used_status_last_updated' in table[%s] in [%d] rows.",
        segmentsTable, totalUpdatedEntries
    );
  }

  /**
   * Builds the runnable executed by {@link #exec} on each periodic tick. It first defers to any
   * still-fresh on-demand poll, then runs {@link #poll()} only if this cycle (identified by
   * {@code startOrder}) is still the current one.
   */
  private Runnable createPollTaskForStartOrder(long startOrder, PeriodicDatabasePoll periodicDatabasePoll)
  {
    return () -> {
      // If latest poll was an OnDemandDatabasePoll that started less than periodicPollDelay,
      // We will wait for (periodicPollDelay - currentTime - LatestOnDemandDatabasePollStartTime) then check again.
      try {
        long periodicPollDelayNanos = TimeUnit.MILLISECONDS.toNanos(periodicPollDelay.getMillis());
        while (latestDatabasePoll != null
               && latestDatabasePoll instanceof OnDemandDatabasePoll
               && ((OnDemandDatabasePoll) latestDatabasePoll).nanosElapsedFromInitiation() < periodicPollDelayNanos) {
          long sleepNano = periodicPollDelayNanos
                           - ((OnDemandDatabasePoll) latestDatabasePoll).nanosElapsedFromInitiation();
          TimeUnit.NANOSECONDS.sleep(sleepNano);
        }
      }
      catch (Exception e) {
        log.debug(e, "Exception found while waiting for next periodic poll");
      }

      // poll() is synchronized together with startPollingDatabasePeriodically(), stopPollingDatabasePeriodically() and
      // isPollingDatabasePeriodically() to ensure that when stopPollingDatabasePeriodically() exits, poll() won't
      // actually run anymore after that (it could only enter the synchronized section and exit immediately because the
      // localStartedOrder doesn't match the new currentStartPollingOrder). It's needed to avoid flakiness in
      // SqlSegmentsMetadataManagerTest. See https://github.com/apache/druid/issues/6028
      ReentrantReadWriteLock.ReadLock lock = startStopPollLock.readLock();
      lock.lock();
      try {
        if (startOrder == currentStartPollingOrder) {
          periodicDatabasePoll.lastPollStartTimestampInMs = System.currentTimeMillis();
          poll();
          periodicDatabasePoll.firstPollCompletionFuture.complete(null);
          latestDatabasePoll = periodicDatabasePoll;
        } else {
          // NOTE(review): message text says "=", but this branch runs precisely when
          // startOrder != currentStartPollingOrder (a stale task from a previous cycle).
          log.debug("startOrder = currentStartPollingOrder = %d, skipping poll()", startOrder);
        }
      }
      catch (Throwable t) {
        log.makeAlert(t, "Uncaught exception in %s's polling thread", SqlSegmentsMetadataManager.class).emit();
        // Swallow the exception, so that scheduled polling goes on. Leave firstPollFutureSinceLastStart uncompleted
        // for now, so that it may be completed during the next poll.
        if (!(t instanceof Exception)) {
          // Don't try to swallow a Throwable which is not an Exception (that is, an Error).
          periodicDatabasePoll.firstPollCompletionFuture.completeExceptionally(t);
          throw t;
        }
      }
      finally {
        lock.unlock();
      }
    };
  }

  @Override
  public boolean isPollingDatabasePeriodically()
  {
    // isPollingDatabasePeriodically() is synchronized together with startPollingDatabasePeriodically(),
    // stopPollingDatabasePeriodically() and poll() to ensure that the latest currentStartPollingOrder is always
    // visible. readLock should be used to avoid unexpected performance degradation of DruidCoordinator.
    ReentrantReadWriteLock.ReadLock lock = startStopPollLock.readLock();
    lock.lock();
    try {
      return currentStartPollingOrder >= 0;
    }
    finally {
      lock.unlock();
    }
  }

  /** Ends the periodic poll cycle; the last polled snapshot remains queryable. Idempotent. */
  @Override
  public void stopPollingDatabasePeriodically()
  {
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      if (!isPollingDatabasePeriodically()) {
        return;
      }

      periodicPollTaskFuture.cancel(false);
      latestDatabasePoll = null;

      // NOT nulling dataSourcesSnapshot, allowing to query the latest polled data even when this
      // SegmentsMetadataManager object is stopped.
      currentStartPollingOrder = -1;
    }
    finally {
      lock.unlock();
    }
  }

  /**
   * Ensures {@link #dataSourcesSnapshot} is no older than {@link #periodicPollDelay}; if no
   * sufficiently fresh poll exists, performs a new on-demand poll under the write lock
   * (double-checked locking, with {@link #useLatestSnapshotIfWithinDelay()} as the check).
   */
  private void useLatestIfWithinDelayOrPerformNewDatabasePoll()
  {
    // Double-checked locking with useLatestSnapshotIfWithinDelay() call playing the role of the "check".
    if (useLatestSnapshotIfWithinDelay()) {
      return;
    }
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      if (useLatestSnapshotIfWithinDelay()) {
        return;
      }
      OnDemandDatabasePoll onDemandDatabasePoll = new OnDemandDatabasePoll();
      this.latestDatabasePoll = onDemandDatabasePoll;
      doOnDemandPoll(onDemandDatabasePoll);
    }
    finally {
      lock.unlock();
    }
  }

  /**
   * This method returns true without waiting for database poll if the latest {@link DatabasePoll} is a
   * {@link PeriodicDatabasePoll} that has completed its first poll, or an {@link OnDemandDatabasePoll} that is
   * made not longer than {@link #periodicPollDelay} from current time.
   * This method does wait until completion if the latest {@link DatabasePoll} is a
   * {@link PeriodicDatabasePoll} that has not completed its first poll, or an {@link OnDemandDatabasePoll} that is
   * already in the process of polling the database.
   * This means that any method using this check can read from snapshot that is
   * up to {@link SqlSegmentsMetadataManager#periodicPollDelay} old.
   */
  @VisibleForTesting
  boolean useLatestSnapshotIfWithinDelay()
  {
    DatabasePoll latestDatabasePoll = this.latestDatabasePoll;
    if (latestDatabasePoll instanceof PeriodicDatabasePoll) {
      Futures.getUnchecked(((PeriodicDatabasePoll) latestDatabasePoll).firstPollCompletionFuture);
      return true;
    }
    if (latestDatabasePoll instanceof OnDemandDatabasePoll) {
      long periodicPollDelayNanos = TimeUnit.MILLISECONDS.toNanos(periodicPollDelay.getMillis());
      OnDemandDatabasePoll latestOnDemandPoll = (OnDemandDatabasePoll) latestDatabasePoll;
      boolean latestDatabasePollIsFresh = latestOnDemandPoll.nanosElapsedFromInitiation() < periodicPollDelayNanos;
      if (latestDatabasePollIsFresh) {
        Futures.getUnchecked(latestOnDemandPoll.pollCompletionFuture);
        return true;
      }
      // Latest on-demand poll is not fresh. Fall through to return false from this method.
    } else {
      assert latestDatabasePoll == null;
      // No periodic database polls and no on-demand poll have been done yet, nothing to await for.
    }
    return false;
  }

  /**
   * This method will always force a database poll if there is no ongoing database poll. This method will then
   * waits for the new poll or the ongoing poll to completes before returning.
   * This means that any method using this check can be sure that the latest poll for the snapshot was completed after
   * this method was called.
   */
  @VisibleForTesting
  void forceOrWaitOngoingDatabasePoll()
  {
    long checkStartTime = System.currentTimeMillis();
    ReentrantReadWriteLock.WriteLock lock = startStopPollLock.writeLock();
    lock.lock();
    try {
      DatabasePoll latestDatabasePoll = this.latestDatabasePoll;
      try {
        // Verify if there was a periodic poll completed while we were waiting for the lock
        if (latestDatabasePoll instanceof PeriodicDatabasePoll
            && ((PeriodicDatabasePoll) latestDatabasePoll).lastPollStartTimestampInMs > checkStartTime) {
          return;
        }
        // Verify if there was an on-demand poll completed while we were waiting for the lock
        if (latestDatabasePoll instanceof OnDemandDatabasePoll) {
          long checkStartTimeNanos = TimeUnit.MILLISECONDS.toNanos(checkStartTime);
          OnDemandDatabasePoll latestOnDemandPoll = (OnDemandDatabasePoll) latestDatabasePoll;
          if (latestOnDemandPoll.initiationTimeNanos > checkStartTimeNanos) {
            return;
          }
        }
      }
      catch (Exception e) {
        // Latest poll was unsuccessful, try to do a new poll
        log.debug(e, "Latest poll was unsuccessful. Starting a new poll...");
      }
      // Force a database poll
      OnDemandDatabasePoll onDemandDatabasePoll = new OnDemandDatabasePoll();
      this.latestDatabasePoll = onDemandDatabasePoll;
      doOnDemandPoll(onDemandDatabasePoll);
    }
    finally {
      lock.unlock();
    }
  }

  /** Runs one poll and completes (or fails) the given on-demand poll's future accordingly. */
  private void doOnDemandPoll(OnDemandDatabasePoll onDemandPoll)
  {
    try {
      poll();
      onDemandPoll.pollCompletionFuture.complete(null);
    }
    catch (Throwable t) {
      onDemandPoll.pollCompletionFuture.completeExceptionally(t);
      throw t;
    }
  }

  @Override
  public DataSourcesSnapshot getRecentDataSourcesSnapshot()
  {
    useLatestIfWithinDelayOrPerformNewDatabasePoll();
    return dataSourcesSnapshot;
  }

  @Override
  public DataSourcesSnapshot forceUpdateDataSourcesSnapshot()
  {
    forceOrWaitOngoingDatabasePoll();
    return dataSourcesSnapshot;
  }

  /**
   * Returns the last snapshot built by the manager. This method always returns
   * immediately, even if the last snapshot is older than the poll period.
   */
  @VisibleForTesting
  DataSourcesSnapshot getLatestDataSourcesSnapshot()
  {
    return dataSourcesSnapshot;
  }

  @VisibleForTesting
  DatabasePoll getLatestDatabasePoll()
  {
    return latestDatabasePoll;
  }

  @VisibleForTesting
  void poll()
  {
    // See the comment to the pollLock field, explaining this synchronized block
    synchronized (pollLock) {
      if (centralizedDatasourceSchemaConfig.isEnabled()) {
        pollSegmentAndSchema();
      } else {
        pollSegments();
      }
    }
  }

  /** Streams all used segments from the metadata store and rebuilds the snapshot. */
  private void pollSegments()
  {
    final DateTime startTime = DateTimes.nowUtc();
    final Stopwatch stopwatch = Stopwatch.createStarted();

    // Some databases such as PostgreSQL require auto-commit turned off
    // to stream results back, enabling transactions disables auto-commit
    // setting connection to read-only will allow some database such as MySQL
    // to automatically use read-only transaction mode, further optimizing the query
    final List<DataSegment> segments = connector.inReadOnlyTransaction(
        (handle, status) -> handle
            .createQuery(StringUtils.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable()))
            .setFetchSize(connector.getStreamingFetchSize())
            .map((index, r, ctx) -> {
              try {
                DataSegment segment = segmentReader.readValue(r.getBytes("payload"));
                return replaceWithExistingSegmentIfPresent(segment);
              }
              catch (IOException e) {
                log.makeAlert(e, "Failed to read segment from db.").emit();
                // If one entry in database is corrupted doPoll() should continue to work overall. See
                // filter by `Objects::nonNull` below in this method.
                return null;
              }
            }).list()
    );

    Preconditions.checkNotNull(
        segments,
        "Unexpected 'null' when polling segments from the db, aborting snapshot update."
    );
    stopwatch.stop();
    emitMetric("segment/poll/time", stopwatch.millisElapsed());
    log.info(
        "Polled and found [%,d] segments in the database in [%,d]ms.",
        segments.size(), stopwatch.millisElapsed()
    );

    createDatasourcesSnapshot(startTime, segments);
  }

  /**
   * Like {@link #pollSegments()}, but additionally polls per-segment row counts / schema
   * fingerprints and the schema payload table, refreshing {@link #segmentSchemaCache}.
   */
  private void pollSegmentAndSchema()
  {
    final DateTime startTime = DateTimes.nowUtc();
    final Stopwatch stopwatch = Stopwatch.createStarted();

    ImmutableMap.Builder<SegmentId, SegmentMetadata> segmentMetadataBuilder = new ImmutableMap.Builder<>();

    // Collect and emit stats for the schema cache before every DB poll
    segmentSchemaCache.getStats().forEach(this::emitMetric);

    // some databases such as PostgreSQL require auto-commit turned off
    // to stream results back, enabling transactions disables auto-commit
    //
    // setting connection to read-only will allow some database such as MySQL
    // to automatically use read-only transaction mode, further optimizing the query
    final List<DataSegment> segments = connector.inReadOnlyTransaction(
        new TransactionCallback<>()
        {
          @Override
          public List<DataSegment> inTransaction(Handle handle, TransactionStatus status)
          {
            return handle
                .createQuery(StringUtils.format("SELECT payload, schema_fingerprint, num_rows FROM %s WHERE used=true", getSegmentsTable()))
                .setFetchSize(connector.getStreamingFetchSize())
                .map(
                    (index, r, ctx) -> {
                      try {
                        DataSegment segment = jsonMapper.readValue(r.getBytes("payload"), DataSegment.class);
                        Long numRows = (Long) r.getObject("num_rows");
                        String schemaFingerprint = r.getString("schema_fingerprint");

                        // Only segments with both pieces of metadata get an entry in the cache.
                        if (schemaFingerprint != null && numRows != null) {
                          segmentMetadataBuilder.put(
                              segment.getId(),
                              new SegmentMetadata(numRows, schemaFingerprint)
                          );
                        }
                        return replaceWithExistingSegmentIfPresent(segment);
                      }
                      catch (IOException e) {
                        log.makeAlert(e, "Failed to read segment from db.").emit();
                        // If one entry in database is corrupted doPoll() should continue to work overall. See
                        // filter by `Objects::nonNull` below in this method.
                        return null;
                      }
                    }
                )
                .list();
          }
        }
    );

    ImmutableMap.Builder<String, SchemaPayload> schemaMapBuilder = new ImmutableMap.Builder<>();

    final String schemaPollQuery = StringUtils.format(
        "SELECT fingerprint, payload FROM %s WHERE version = %s",
        getSegmentSchemaTable(),
        CentralizedDatasourceSchemaConfig.SCHEMA_VERSION
    );

    connector.inReadOnlyTransaction(
        (handle, status) -> {
          handle.createQuery(schemaPollQuery)
                .setFetchSize(connector.getStreamingFetchSize())
                .map((index, r, ctx) -> {
                  try {
                    schemaMapBuilder.put(
                        r.getString("fingerprint"),
                        jsonMapper.readValue(r.getBytes("payload"), SchemaPayload.class)
                    );
                  }
                  catch (IOException e) {
                    log.makeAlert(e, "Failed to read schema from db.").emit();
                  }
                  return null;
                }).list();

          return null;
        });

    ImmutableMap<String, SchemaPayload> schemaMap = schemaMapBuilder.build();
    segmentSchemaCache.resetSchemaForPublishedSegments(segmentMetadataBuilder.build(), schemaMap);

    Preconditions.checkNotNull(
        segments,
        "Unexpected 'null' when polling segments from the db, aborting snapshot update."
    );

    stopwatch.stop();
    emitMetric("segment/pollWithSchema/time", stopwatch.millisElapsed());
    log.info(
        "Polled and found [%,d] segments and [%,d] schemas in the database in [%,d]ms.",
        segments.size(), schemaMap.size(), stopwatch.millisElapsed()
    );

    createDatasourcesSnapshot(startTime, segments);
  }

  private void emitMetric(String metricName, long value)
  {
    serviceEmitter.emit(new ServiceMetricEvent.Builder().setMetric(metricName, value));
  }

  /** Replaces {@link #dataSourcesSnapshot} with a new immutable snapshot of the polled segments. */
  private void createDatasourcesSnapshot(DateTime snapshotTime, List<DataSegment> segments)
  {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    // dataSourcesSnapshot is updated only here and the DataSourcesSnapshot object is immutable. If data sources or
    // segments are marked as used or unused directly (via markAs...() methods in SegmentsMetadataManager), the
    // dataSourcesSnapshot can become invalid until the next database poll.
    // DataSourcesSnapshot computes the overshadowed segments, which makes it an expensive operation if the
    // snapshot was invalidated on each segment mark as unused or used, especially if a user issues a lot of single
    // segment mark calls in rapid succession. So the snapshot update is not done outside of database poll at this time.
    // Updates outside of database polls were primarily for the user experience, so users would immediately see the
    // effect of a segment mark call reflected in MetadataResource API calls.
    dataSourcesSnapshot = DataSourcesSnapshot.fromUsedSegments(
        Iterables.filter(segments, Objects::nonNull), // Filter corrupted entries (see above in this method).
        snapshotTime
    );
    emitMetric("segment/buildSnapshot/time", stopwatch.millisElapsed());
    log.debug(
        "Created snapshot from polled segments in [%d]ms. Found [%d] overshadowed segments.",
        stopwatch.millisElapsed(), dataSourcesSnapshot.getOvershadowedSegments().size()
    );
  }

  /**
   * For the garbage collector in Java, it's better to keep new objects short-living, but once they are old enough
   * (i.e. promoted to old generation), try to keep them alive. In {@link #poll()}, we fetch and deserialize all
   * existing segments each time, and then replace them in {@link #dataSourcesSnapshot}. This method allows to use
   * already existing (old) segments when possible, effectively interning them a-la {@link String#intern} or {@link
   * com.google.common.collect.Interner}, aiming to make the majority of {@link DataSegment} objects garbage soon after
   * they are deserialized and to die in young generation. It allows to avoid fragmentation of the old generation and
   * full GCs.
   */
  private DataSegment replaceWithExistingSegmentIfPresent(DataSegment segment)
  {
    @MonotonicNonNull DataSourcesSnapshot dataSourcesSnapshot = this.dataSourcesSnapshot;
    if (dataSourcesSnapshot == null) {
      return segment;
    }
    @Nullable ImmutableDruidDataSource dataSource = dataSourcesSnapshot.getDataSource(segment.getDataSource());
    if (dataSource == null) {
      return segment;
    }
    DataSegment alreadyExistingSegment = dataSource.getSegment(segment.getId());
    return alreadyExistingSegment != null ? alreadyExistingSegment : segment;
  }

  private String getSegmentsTable()
  {
    return dbTables.get().getSegmentsTable();
  }

  private String getSegmentSchemaTable()
  {
    return dbTables.get().getSegmentSchemasTable();
  }
}
googleapis/google-cloud-java
35,479
java-speech/proto-google-cloud-speech-v1/src/main/java/com/google/cloud/speech/v1/ListPhraseSetResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/speech/v1/cloud_speech_adaptation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.speech.v1; /** * * * <pre> * Message returned to the client by the `ListPhraseSet` method. * </pre> * * Protobuf type {@code google.cloud.speech.v1.ListPhraseSetResponse} */ public final class ListPhraseSetResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.speech.v1.ListPhraseSetResponse) ListPhraseSetResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPhraseSetResponse.newBuilder() to construct. 
private ListPhraseSetResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPhraseSetResponse() { phraseSets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPhraseSetResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.speech.v1.SpeechAdaptationProto .internal_static_google_cloud_speech_v1_ListPhraseSetResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.speech.v1.SpeechAdaptationProto .internal_static_google_cloud_speech_v1_ListPhraseSetResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.speech.v1.ListPhraseSetResponse.class, com.google.cloud.speech.v1.ListPhraseSetResponse.Builder.class); } public static final int PHRASE_SETS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.speech.v1.PhraseSet> phraseSets_; /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.speech.v1.PhraseSet> getPhraseSetsList() { return phraseSets_; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.speech.v1.PhraseSetOrBuilder> getPhraseSetsOrBuilderList() { return phraseSets_; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ @java.lang.Override public int getPhraseSetsCount() { return phraseSets_.size(); } /** * * * <pre> * The phrase set. 
* </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ @java.lang.Override public com.google.cloud.speech.v1.PhraseSet getPhraseSets(int index) { return phraseSets_.get(index); } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ @java.lang.Override public com.google.cloud.speech.v1.PhraseSetOrBuilder getPhraseSetsOrBuilder(int index) { return phraseSets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < phraseSets_.size(); i++) { output.writeMessage(1, phraseSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < phraseSets_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, phraseSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.speech.v1.ListPhraseSetResponse)) { return super.equals(obj); } com.google.cloud.speech.v1.ListPhraseSetResponse other = (com.google.cloud.speech.v1.ListPhraseSetResponse) obj; if (!getPhraseSetsList().equals(other.getPhraseSetsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPhraseSetsCount() > 0) { hash = (37 * hash) + PHRASE_SETS_FIELD_NUMBER; hash = (53 * hash) + getPhraseSetsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.speech.v1.ListPhraseSetResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.speech.v1.ListPhraseSetResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message returned to the client by the `ListPhraseSet` method. * </pre> * * Protobuf type {@code google.cloud.speech.v1.ListPhraseSetResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.speech.v1.ListPhraseSetResponse) com.google.cloud.speech.v1.ListPhraseSetResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.speech.v1.SpeechAdaptationProto .internal_static_google_cloud_speech_v1_ListPhraseSetResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.speech.v1.SpeechAdaptationProto .internal_static_google_cloud_speech_v1_ListPhraseSetResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.speech.v1.ListPhraseSetResponse.class, com.google.cloud.speech.v1.ListPhraseSetResponse.Builder.class); } // Construct using com.google.cloud.speech.v1.ListPhraseSetResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (phraseSetsBuilder_ == null) { phraseSets_ = java.util.Collections.emptyList(); } else { phraseSets_ = null; phraseSetsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.speech.v1.SpeechAdaptationProto .internal_static_google_cloud_speech_v1_ListPhraseSetResponse_descriptor; } 
@java.lang.Override public com.google.cloud.speech.v1.ListPhraseSetResponse getDefaultInstanceForType() { return com.google.cloud.speech.v1.ListPhraseSetResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.speech.v1.ListPhraseSetResponse build() { com.google.cloud.speech.v1.ListPhraseSetResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.speech.v1.ListPhraseSetResponse buildPartial() { com.google.cloud.speech.v1.ListPhraseSetResponse result = new com.google.cloud.speech.v1.ListPhraseSetResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.speech.v1.ListPhraseSetResponse result) { if (phraseSetsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { phraseSets_ = java.util.Collections.unmodifiableList(phraseSets_); bitField0_ = (bitField0_ & ~0x00000001); } result.phraseSets_ = phraseSets_; } else { result.phraseSets_ = phraseSetsBuilder_.build(); } } private void buildPartial0(com.google.cloud.speech.v1.ListPhraseSetResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object 
value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.speech.v1.ListPhraseSetResponse) { return mergeFrom((com.google.cloud.speech.v1.ListPhraseSetResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.speech.v1.ListPhraseSetResponse other) { if (other == com.google.cloud.speech.v1.ListPhraseSetResponse.getDefaultInstance()) return this; if (phraseSetsBuilder_ == null) { if (!other.phraseSets_.isEmpty()) { if (phraseSets_.isEmpty()) { phraseSets_ = other.phraseSets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePhraseSetsIsMutable(); phraseSets_.addAll(other.phraseSets_); } onChanged(); } } else { if (!other.phraseSets_.isEmpty()) { if (phraseSetsBuilder_.isEmpty()) { phraseSetsBuilder_.dispose(); phraseSetsBuilder_ = null; phraseSets_ = other.phraseSets_; bitField0_ = (bitField0_ & ~0x00000001); phraseSetsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPhraseSetsFieldBuilder() : null; } else { phraseSetsBuilder_.addAllMessages(other.phraseSets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.speech.v1.PhraseSet m = input.readMessage( com.google.cloud.speech.v1.PhraseSet.parser(), extensionRegistry); if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); phraseSets_.add(m); } else { phraseSetsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.speech.v1.PhraseSet> phraseSets_ = java.util.Collections.emptyList(); private void ensurePhraseSetsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { phraseSets_ = new java.util.ArrayList<com.google.cloud.speech.v1.PhraseSet>(phraseSets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.speech.v1.PhraseSet, com.google.cloud.speech.v1.PhraseSet.Builder, com.google.cloud.speech.v1.PhraseSetOrBuilder> 
phraseSetsBuilder_; /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public java.util.List<com.google.cloud.speech.v1.PhraseSet> getPhraseSetsList() { if (phraseSetsBuilder_ == null) { return java.util.Collections.unmodifiableList(phraseSets_); } else { return phraseSetsBuilder_.getMessageList(); } } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public int getPhraseSetsCount() { if (phraseSetsBuilder_ == null) { return phraseSets_.size(); } else { return phraseSetsBuilder_.getCount(); } } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public com.google.cloud.speech.v1.PhraseSet getPhraseSets(int index) { if (phraseSetsBuilder_ == null) { return phraseSets_.get(index); } else { return phraseSetsBuilder_.getMessage(index); } } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder setPhraseSets(int index, com.google.cloud.speech.v1.PhraseSet value) { if (phraseSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePhraseSetsIsMutable(); phraseSets_.set(index, value); onChanged(); } else { phraseSetsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder setPhraseSets( int index, com.google.cloud.speech.v1.PhraseSet.Builder builderForValue) { if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); phraseSets_.set(index, builderForValue.build()); onChanged(); } else { phraseSetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The phrase set. 
* </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder addPhraseSets(com.google.cloud.speech.v1.PhraseSet value) { if (phraseSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePhraseSetsIsMutable(); phraseSets_.add(value); onChanged(); } else { phraseSetsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder addPhraseSets(int index, com.google.cloud.speech.v1.PhraseSet value) { if (phraseSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePhraseSetsIsMutable(); phraseSets_.add(index, value); onChanged(); } else { phraseSetsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder addPhraseSets(com.google.cloud.speech.v1.PhraseSet.Builder builderForValue) { if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); phraseSets_.add(builderForValue.build()); onChanged(); } else { phraseSetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder addPhraseSets( int index, com.google.cloud.speech.v1.PhraseSet.Builder builderForValue) { if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); phraseSets_.add(index, builderForValue.build()); onChanged(); } else { phraseSetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder addAllPhraseSets( java.lang.Iterable<? 
extends com.google.cloud.speech.v1.PhraseSet> values) { if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, phraseSets_); onChanged(); } else { phraseSetsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder clearPhraseSets() { if (phraseSetsBuilder_ == null) { phraseSets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { phraseSetsBuilder_.clear(); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public Builder removePhraseSets(int index) { if (phraseSetsBuilder_ == null) { ensurePhraseSetsIsMutable(); phraseSets_.remove(index); onChanged(); } else { phraseSetsBuilder_.remove(index); } return this; } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public com.google.cloud.speech.v1.PhraseSet.Builder getPhraseSetsBuilder(int index) { return getPhraseSetsFieldBuilder().getBuilder(index); } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public com.google.cloud.speech.v1.PhraseSetOrBuilder getPhraseSetsOrBuilder(int index) { if (phraseSetsBuilder_ == null) { return phraseSets_.get(index); } else { return phraseSetsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public java.util.List<? extends com.google.cloud.speech.v1.PhraseSetOrBuilder> getPhraseSetsOrBuilderList() { if (phraseSetsBuilder_ != null) { return phraseSetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(phraseSets_); } } /** * * * <pre> * The phrase set. 
* </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public com.google.cloud.speech.v1.PhraseSet.Builder addPhraseSetsBuilder() { return getPhraseSetsFieldBuilder() .addBuilder(com.google.cloud.speech.v1.PhraseSet.getDefaultInstance()); } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public com.google.cloud.speech.v1.PhraseSet.Builder addPhraseSetsBuilder(int index) { return getPhraseSetsFieldBuilder() .addBuilder(index, com.google.cloud.speech.v1.PhraseSet.getDefaultInstance()); } /** * * * <pre> * The phrase set. * </pre> * * <code>repeated .google.cloud.speech.v1.PhraseSet phrase_sets = 1;</code> */ public java.util.List<com.google.cloud.speech.v1.PhraseSet.Builder> getPhraseSetsBuilderList() { return getPhraseSetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.speech.v1.PhraseSet, com.google.cloud.speech.v1.PhraseSet.Builder, com.google.cloud.speech.v1.PhraseSetOrBuilder> getPhraseSetsFieldBuilder() { if (phraseSetsBuilder_ == null) { phraseSetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.speech.v1.PhraseSet, com.google.cloud.speech.v1.PhraseSet.Builder, com.google.cloud.speech.v1.PhraseSetOrBuilder>( phraseSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); phraseSets_ = null; } return phraseSetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.speech.v1.ListPhraseSetResponse) } // @@protoc_insertion_point(class_scope:google.cloud.speech.v1.ListPhraseSetResponse) private static final com.google.cloud.speech.v1.ListPhraseSetResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.speech.v1.ListPhraseSetResponse(); } public static com.google.cloud.speech.v1.ListPhraseSetResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPhraseSetResponse> PARSER = new com.google.protobuf.AbstractParser<ListPhraseSetResponse>() { @java.lang.Override public ListPhraseSetResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPhraseSetResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPhraseSetResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.speech.v1.ListPhraseSetResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,543
java-tasks/proto-google-cloud-tasks-v2/src/main/java/com/google/cloud/tasks/v2/UpdateQueueRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/tasks/v2/cloudtasks.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.tasks.v2; /** * * * <pre> * Request message for * [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2.UpdateQueueRequest} */ public final class UpdateQueueRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.tasks.v2.UpdateQueueRequest) UpdateQueueRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateQueueRequest.newBuilder() to construct. 
private UpdateQueueRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateQueueRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateQueueRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2.CloudTasksProto .internal_static_google_cloud_tasks_v2_UpdateQueueRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2.CloudTasksProto .internal_static_google_cloud_tasks_v2_UpdateQueueRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2.UpdateQueueRequest.class, com.google.cloud.tasks.v2.UpdateQueueRequest.Builder.class); } private int bitField0_; public static final int QUEUE_FIELD_NUMBER = 1; private com.google.cloud.tasks.v2.Queue queue_; /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the queue field is set. */ @java.lang.Override public boolean hasQueue() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. 
* </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The queue. */ @java.lang.Override public com.google.cloud.tasks.v2.Queue getQueue() { return queue_ == null ? com.google.cloud.tasks.v2.Queue.getDefaultInstance() : queue_; } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.cloud.tasks.v2.QueueOrBuilder getQueueOrBuilder() { return queue_ == null ? com.google.cloud.tasks.v2.Queue.getDefaultInstance() : queue_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getQueue()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getQueue()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.tasks.v2.UpdateQueueRequest)) { return super.equals(obj); } com.google.cloud.tasks.v2.UpdateQueueRequest other = (com.google.cloud.tasks.v2.UpdateQueueRequest) obj; if (hasQueue() != other.hasQueue()) return false; if (hasQueue()) { if (!getQueue().equals(other.getQueue())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueue()) { hash = (37 * hash) + QUEUE_FIELD_NUMBER; hash = (53 * hash) + getQueue().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2.UpdateQueueRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.tasks.v2.UpdateQueueRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2.UpdateQueueRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2.UpdateQueueRequest) com.google.cloud.tasks.v2.UpdateQueueRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2.CloudTasksProto .internal_static_google_cloud_tasks_v2_UpdateQueueRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2.CloudTasksProto .internal_static_google_cloud_tasks_v2_UpdateQueueRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2.UpdateQueueRequest.class, com.google.cloud.tasks.v2.UpdateQueueRequest.Builder.class); } // Construct using com.google.cloud.tasks.v2.UpdateQueueRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getQueueFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; queue_ = null; if (queueBuilder_ != null) { queueBuilder_.dispose(); queueBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.tasks.v2.CloudTasksProto .internal_static_google_cloud_tasks_v2_UpdateQueueRequest_descriptor; } @java.lang.Override public com.google.cloud.tasks.v2.UpdateQueueRequest getDefaultInstanceForType() { return com.google.cloud.tasks.v2.UpdateQueueRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.tasks.v2.UpdateQueueRequest build() { com.google.cloud.tasks.v2.UpdateQueueRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.tasks.v2.UpdateQueueRequest buildPartial() { com.google.cloud.tasks.v2.UpdateQueueRequest result = new com.google.cloud.tasks.v2.UpdateQueueRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.tasks.v2.UpdateQueueRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.queue_ = queueBuilder_ == null ? queue_ : queueBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.tasks.v2.UpdateQueueRequest) { return mergeFrom((com.google.cloud.tasks.v2.UpdateQueueRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.tasks.v2.UpdateQueueRequest other) { if (other == com.google.cloud.tasks.v2.UpdateQueueRequest.getDefaultInstance()) return this; if (other.hasQueue()) { mergeQueue(other.getQueue()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean 
done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getQueueFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.tasks.v2.Queue queue_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.tasks.v2.Queue, com.google.cloud.tasks.v2.Queue.Builder, com.google.cloud.tasks.v2.QueueOrBuilder> queueBuilder_; /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the queue field is set. */ public boolean hasQueue() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The queue. 
*/ public com.google.cloud.tasks.v2.Queue getQueue() { if (queueBuilder_ == null) { return queue_ == null ? com.google.cloud.tasks.v2.Queue.getDefaultInstance() : queue_; } else { return queueBuilder_.getMessage(); } } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueue(com.google.cloud.tasks.v2.Queue value) { if (queueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } queue_ = value; } else { queueBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueue(com.google.cloud.tasks.v2.Queue.Builder builderForValue) { if (queueBuilder_ == null) { queue_ = builderForValue.build(); } else { queueBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. 
* </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeQueue(com.google.cloud.tasks.v2.Queue value) { if (queueBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && queue_ != null && queue_ != com.google.cloud.tasks.v2.Queue.getDefaultInstance()) { getQueueBuilder().mergeFrom(value); } else { queue_ = value; } } else { queueBuilder_.mergeFrom(value); } if (queue_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearQueue() { bitField0_ = (bitField0_ & ~0x00000001); queue_ = null; if (queueBuilder_ != null) { queueBuilder_.dispose(); queueBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.tasks.v2.Queue.Builder getQueueBuilder() { bitField0_ |= 0x00000001; onChanged(); return getQueueFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. 
* Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.tasks.v2.QueueOrBuilder getQueueOrBuilder() { if (queueBuilder_ != null) { return queueBuilder_.getMessageOrBuilder(); } else { return queue_ == null ? com.google.cloud.tasks.v2.Queue.getDefaultInstance() : queue_; } } /** * * * <pre> * Required. The queue to create or update. * * The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. * * Output only fields cannot be modified using UpdateQueue. * Any value specified for an output only field will be ignored. * The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. * </pre> * * <code>.google.cloud.tasks.v2.Queue queue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.tasks.v2.Queue, com.google.cloud.tasks.v2.Queue.Builder, com.google.cloud.tasks.v2.QueueOrBuilder> getQueueFieldBuilder() { if (queueBuilder_ == null) { queueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.tasks.v2.Queue, com.google.cloud.tasks.v2.Queue.Builder, com.google.cloud.tasks.v2.QueueOrBuilder>( getQueue(), getParentForChildren(), isClean()); queue_ = null; } return queueBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * A mask used to specify which fields of the queue are being updated. * * If empty, then all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2.UpdateQueueRequest) } // @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.UpdateQueueRequest) private static final com.google.cloud.tasks.v2.UpdateQueueRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.tasks.v2.UpdateQueueRequest(); } public static com.google.cloud.tasks.v2.UpdateQueueRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateQueueRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateQueueRequest>() { @java.lang.Override public UpdateQueueRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateQueueRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateQueueRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.tasks.v2.UpdateQueueRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/harmony
35,714
classlib/modules/luni-kernel/src/main/java/java/lang/System.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.lang; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FileDescriptor; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.nio.channels.Channel; import java.nio.channels.spi.SelectorProvider; import java.security.Policy; import java.util.Map; import java.util.Properties; import java.util.PropertyPermission; /** * Provides access to system-related information and resources including * standard input and output. Enables clients to dynamically load native * libraries. All methods of this class are accessed in a static way and the * class itself can not be instantiated. * * @see Runtime */ public final class System { // The standard input, output, and error streams. // Typically, these are connected to the shell which // ran the Java program. /** * Default input stream. */ public static final InputStream in; /** * Default output stream. */ public static final PrintStream out; /** * Default error output stream. 
*/ public static final PrintStream err; // Get a ref to the Runtime instance for faster lookup private static final Runtime RUNTIME = Runtime.getRuntime(); /** * The System Properties table. */ private static Properties systemProperties; // The System default SecurityManager private static SecurityManager security; // Indicates whether the classes needed for // permission checks was initialized or not private static boolean security_initialized; // Initialize all the slots in System on first use. static { // Fill in the properties from the VM information. ensureProperties(); // Set up standard in, out, and err. err = new String.ConsolePrintStream(new BufferedOutputStream(new FileOutputStream( FileDescriptor.err))); out = new String.ConsolePrintStream(new BufferedOutputStream(new FileOutputStream( FileDescriptor.out))); in = new BufferedInputStream(new FileInputStream(FileDescriptor.in)); } /** * Sets the standard input stream to the given user defined input stream. * * @param newIn * the user defined input stream to set as the standard input * stream. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPermission()} method does not allow the change of the * stream. */ @SuppressWarnings("unused") public static void setIn(InputStream newIn) { SecurityManager secMgr = System.getSecurityManager(); setFieldImpl("in", newIn); } /** * Sets the standard output stream to the given user defined output stream. * * @param newOut * the user defined output stream to set as the standard output * stream. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPermission()} method does not allow the change of the * stream. */ @SuppressWarnings("unused") public static void setOut(java.io.PrintStream newOut) { SecurityManager secMgr = System.getSecurityManager(); setFieldImpl("out", newOut); } /** * Sets the standard error output stream to the given user defined output * stream. 
* * @param newErr * the user defined output stream to set as the standard error * output stream. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPermission()} method does not allow the change of the * stream. */ @SuppressWarnings("unused") public static void setErr(java.io.PrintStream newErr) { SecurityManager secMgr = System.getSecurityManager(); } /** * Prevents this class from being instantiated. */ private System() { } /** * Copies the number of {@code length} elements of the Array {@code src} * starting at the offset {@code srcPos} into the Array {@code dest} at * the position {@code destPos}. * * @param src * the source array to copy the content. * @param srcPos * the starting index of the content in {@code src}. * @param dest * the destination array to copy the data into. * @param destPos * the starting index for the copied content in {@code dest}. * @param length * the number of elements of the {@code array1} content they have * to be copied. 
*/ public static void arraycopy(Object src, int srcPos, Object dest, int destPos, int length) { // sending getClass() to both arguments will check for null Class<?> type1 = src.getClass(); Class<?> type2 = dest.getClass(); if (!type1.isArray() || !type2.isArray()) { throw new ArrayStoreException(); } Class<?> componentType1 = type1.getComponentType(); Class<?> componentType2 = type2.getComponentType(); if (!componentType1.isPrimitive()) { if (componentType2.isPrimitive()) { throw new ArrayStoreException(); } arraycopy((Object[]) src, srcPos, (Object[]) dest, destPos, length); } else { if (componentType2 != componentType1) { throw new ArrayStoreException(); } if (componentType1 == Integer.TYPE) { arraycopy((int[]) src, srcPos, (int[]) dest, destPos, length); } else if (componentType1 == Byte.TYPE) { arraycopy((byte[]) src, srcPos, (byte[]) dest, destPos, length); } else if (componentType1 == Long.TYPE) { arraycopy((long[]) src, srcPos, (long[]) dest, destPos, length); } else if (componentType1 == Short.TYPE) { arraycopy((short[]) src, srcPos, (short[]) dest, destPos, length); } else if (componentType1 == Character.TYPE) { arraycopy((char[]) src, srcPos, (char[]) dest, destPos, length); } else if (componentType1 == Boolean.TYPE) { arraycopy((boolean[]) src, srcPos, (boolean[]) dest, destPos, length); } else if (componentType1 == Double.TYPE) { arraycopy((double[]) src, srcPos, (double[]) dest, destPos, length); } else if (componentType1 == Float.TYPE) { arraycopy((float[]) src, srcPos, (float[]) dest, destPos, length); } } } /** * Private version of the arraycopy method (used by the jit for reference * arraycopies) */ private static void arraycopy(Object[] A1, int offset1, Object[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < 
length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. * * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(int[] A1, int offset1, int[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. 
* * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(byte[] A1, int offset1, byte[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. * * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(short[] A1, int offset1, short[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. 
* * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(long[] A1, int offset1, long[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. * * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(char[] A1, int offset1, char[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. 
* * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(boolean[] A1, int offset1, boolean[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. * * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(double[] A1, int offset1, double[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Copies the contents of <code>A1</code> starting at offset * <code>offset1</code> into <code>A2</code> starting at offset * <code>offset2</code> for <code>length</code> elements. 
* * @param A1 the array to copy out of * @param offset1 the starting index in array1 * @param A2 the array to copy into * @param offset2 the starting index in array2 * @param length the number of elements in the array to copy */ private static void arraycopy(float[] A1, int offset1, float[] A2, int offset2, int length) { if (offset1 >= 0 && offset2 >= 0 && length >= 0 && length <= A1.length - offset1 && length <= A2.length - offset2) { // Check if this is a forward or backwards arraycopy if (A1 != A2 || offset1 > offset2 || offset1 + length <= offset2) { for (int i = 0; i < length; ++i) { A2[offset2 + i] = A1[offset1 + i]; } } else { for (int i = length - 1; i >= 0; --i) { A2[offset2 + i] = A1[offset1 + i]; } } } else { throw new ArrayIndexOutOfBoundsException(); } } /** * Returns the current system time in milliseconds since January 1, 1970 * 00:00:00 UTC. This method shouldn't be used for measuring timeouts or * other elapsed time measurements, as changing the system time can affect * the results. * * @return the local system time in milliseconds. */ public static native long currentTimeMillis(); /** * Returns the current timestamp of the most precise timer available on the * local system. This timestamp can only be used to measure an elapsed * period by comparing it against another timestamp. It cannot be used as a * very exact system time expression. * * @return the current timestamp in nanoseconds. */ public static native long nanoTime(); private static final int InitLocale = 0; private static final int PlatformEncoding = 1; private static final int FileEncoding = 2; private static final int OSEncoding = 3; /** * If systemProperties is unset, then create a new one based on the values * provided by the virtual machine. 
*/ private static void ensureProperties() { systemProperties = new Properties(); String platformEncoding = null; String fileEncoding, osEncoding = null; String definedFileEncoding = getEncoding(FileEncoding); String definedOSEncoding = getEncoding(OSEncoding); if (definedFileEncoding != null) { fileEncoding = definedFileEncoding; // if file.encoding is defined, and os.encoding is not, use the // detected // platform encoding for os.encoding if (definedOSEncoding == null) { platformEncoding = getEncoding(PlatformEncoding); osEncoding = platformEncoding; } else { getEncoding(InitLocale); } } else { platformEncoding = getEncoding(PlatformEncoding); fileEncoding = platformEncoding; } // if os.encoding is not defined, file.encoding will be used if (osEncoding == null) { osEncoding = definedOSEncoding; } if (osEncoding != null) { systemProperties.put("os.encoding", osEncoding); } systemProperties.put("file.encoding", fileEncoding); systemProperties.put("java.version", "1.5 subset"); systemProperties.put("java.specification.version", "1.5"); systemProperties.put("java.specification.vendor", "Sun Microsystems Inc."); systemProperties.put("java.specification.name", "Java Platform API Specification"); systemProperties.put("com.ibm.oti.configuration", "clear"); systemProperties.put("com.ibm.oti.configuration.dir", "jclClear"); String[] list = getPropertyList(); for (int i = 0; i < list.length; i += 2) { String key = list[i]; if (key == null) { break; } systemProperties.put(key, list[i + 1]); } String consoleEncoding = (String) systemProperties.get("console.encoding"); if (consoleEncoding == null) { if (platformEncoding == null) { platformEncoding = getEncoding(PlatformEncoding); } consoleEncoding = platformEncoding; systemProperties.put("console.encoding", consoleEncoding); } } /** * Causes the virtual machine to stop running and the program to exit. 
If * {@link #runFinalizersOnExit(boolean)} has been previously invoked with a * {@code true} argument, then all objects will be properly * garbage-collected and finalized first. * * @param code * the return code. * @throws SecurityException * if the running thread has not enough permission to exit the * virtual machine. * @see SecurityManager#checkExit */ public static void exit(int code) { RUNTIME.exit(code); } /** * Indicates to the virtual machine that it would be a good time to run the * garbage collector. Note that this is a hint only. There is no guarantee * that the garbage collector will actually be run. */ public static void gc() { RUNTIME.gc(); } /** * Returns the value of the environment variable with the given name {@code * var}. * * @param name * the name of the environment variable. * @return the value of the specified environment variable or {@code null} * if no variable exists with the given name. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPermission()} method does not allow the querying of * single environment variables. */ public static String getenv(String name) { if (name == null) { throw new NullPointerException(); } SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPermission(new RuntimePermission("getenv." + name)); } throw new Error(); } /** * Returns an unmodifiable map of all available environment variables. * * @return the map representing all environment variables. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPermission()} method does not allow the querying of * all environment variables. */ public static Map<String, String> getenv() { SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPermission(new RuntimePermission("getenv.*")); } throw new Error(); } /** * Returns the inherited channel from the creator of the current virtual * machine. 
* * @return the inherited {@link Channel} or {@code null} if none exists. * @throws IOException * if an I/O error occurred. * @see SelectorProvider * @see SelectorProvider#inheritedChannel() */ public static Channel inheritedChannel() throws IOException { return SelectorProvider.provider().inheritedChannel(); } /** * Returns the system properties. Note that this is not a copy, so that * changes made to the returned Properties object will be reflected in * subsequent calls to getProperty and getProperties. * * @return the system properties. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPropertiesAccess()} method does not allow the operation. */ public static Properties getProperties() { SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPropertiesAccess(); } return systemProperties; } /** * Returns the system properties without any security checks. This is used * for access from within java.lang. * * @return the system properties */ static Properties internalGetProperties() { return systemProperties; } /** * Returns the value of a particular system property or {@code null} if no * such property exists. * <p> * The properties currently provided by the virtual machine are: * * <pre> * java.vendor.url * java.class.path * user.home * java.class.version * os.version * java.vendor * user.dir * user.timezone * path.separator * os.name * os.arch * line.separator * file.separator * user.name * java.version * java.home * </pre> * * @param prop * the name of the system property to look up. * @return the value of the specified system property or {@code null} if the * property doesn't exist. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPropertyAccess()} method does not allow the operation. */ public static String getProperty(String prop) { return getProperty(prop, null); } /** * Returns the value of a particular system property. 
The {@code * defaultValue} will be returned if no such property has been found. * * @param prop * the name of the system property to look up. * @param defaultValue * the return value if the system property with the given name * does not exist. * @return the value of the specified system property or the {@code * defaultValue} if the property does not exist. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPropertyAccess()} method does not allow the operation. */ public static String getProperty(String prop, String defaultValue) { if (prop.length() == 0) { throw new IllegalArgumentException(); } SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPropertyAccess(prop); } return systemProperties.getProperty(prop, defaultValue); } /** * Sets the value of a particular system property. * * @param prop * the name of the system property to be changed. * @param value * the value to associate with the given property {@code prop}. * @return the old value of the property or {@code null} if the property * didn't exist. * @throws SecurityException * if a security manager exists and write access to the * specified property is not allowed. */ public static String setProperty(String prop, String value) { if (prop.length() == 0) { throw new IllegalArgumentException(); } SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPermission(new PropertyPermission(prop, "write")); } return (String) systemProperties.setProperty(prop, value); } /** * Removes a specific system property. * * @param key * the name of the system property to be removed. * @return the property value or {@code null} if the property didn't exist. * @throws NullPointerException * if the argument {@code key} is {@code null}. * @throws IllegalArgumentException * if the argument {@code key} is empty. 
* @throws SecurityException * if a security manager exists and write access to the * specified property is not allowed. * @since 1.5 */ public static String clearProperty(String key) { if (key == null) { throw new NullPointerException(); } if (key.length() == 0) { throw new IllegalArgumentException(); } SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPermission(new PropertyPermission(key, "write")); } return (String) systemProperties.remove(key); } /** * Answers an array of Strings containing key..value pairs (in consecutive * array elements) which represent the starting values for the system * properties as provided by the virtual machine. * * @return the default values for the system properties. */ private static native String[] getPropertyList(); /** * Return the requested encoding. 0 - initialize locale 1 - detected * platform encoding 2 - command line defined file.encoding 3 - command line * defined os.encoding */ private static native String getEncoding(int type); /** * Returns the active security manager. * * @return the system security manager object. */ public static SecurityManager getSecurityManager() { return security; } /** * Returns an integer hash code for the parameter. The hash code returned is * the same one that would be returned by the method {@code * java.lang.Object.hashCode()}, whether or not the object's class has * overridden hashCode(). The hash code for {@code null} is {@code 0}. * * @param anObject * the object to calculate the hash code. * @return the hash code for the given object. * @see java.lang.Object#hashCode */ public static native int identityHashCode(Object anObject); /** * Loads the specified file as a dynamic library. * * @param pathName * the path of the file to be loaded. * @throws SecurityException * if the library was not allowed to be loaded. 
*/ public static void load(String pathName) { SecurityManager smngr = System.getSecurityManager(); if (smngr != null) { smngr.checkLink(pathName); } ClassLoader.loadLibraryWithPath(pathName, ClassLoader.callerClassLoader(), null); } /** * Loads and links the shared library with the given name {@code libName}. * The file will be searched in the default directory for shared libraries * of the local system. * * @param libName * the name of the library to load. * @throws UnsatisfiedLinkError * if the library could not be loaded. * @throws SecurityException * if the library was not allowed to be loaded. */ public static void loadLibrary(String libName) { ClassLoader.loadLibraryWithClassLoader(libName, ClassLoader.callerClassLoader()); } /** * Provides a hint to the virtual machine that it would be useful to attempt * to perform any outstanding object finalizations. */ public static void runFinalization() { RUNTIME.runFinalization(); } /** * Ensures that, when the virtual machine is about to exit, all objects are * finalized. Note that all finalization which occurs when the system is * exiting is performed after all running threads have been terminated. * * @param flag * the flag determines if finalization on exit is enabled. * @deprecated this method is unsafe. */ @SuppressWarnings("deprecation") @Deprecated public static void runFinalizersOnExit(boolean flag) { Runtime.runFinalizersOnExit(flag); } /** * Sets all system properties. Note that the object which is passed in * not copied, so that subsequent changes made to the object will be * reflected in calls to getProperty and getProperties. * * @param p * the new system property. * @throws SecurityException * if a {@link SecurityManager} is installed and its {@code * checkPropertiesAccess()} method does not allow the operation. 
*/ public static void setProperties(Properties p) { SecurityManager secMgr = System.getSecurityManager(); if (secMgr != null) { secMgr.checkPropertiesAccess(); } if (p == null) { ensureProperties(); } else { systemProperties = p; } } /** * Sets the active security manager. Note that once the security manager has * been set, it can not be changed. Attempts to do that will cause a * security exception. * * @param sm * the new security manager. * * @throws SecurityException * if the security manager has already been set and if its * checkPermission method does not allow to redefine the * security manager. */ public static void setSecurityManager(final SecurityManager sm) { if (!security_initialized) { try { // Preload and initialize Policy implementation classes // otherwise we could go recursive Policy.getPolicy(); } catch (Exception e) { } security_initialized = true; } security = sm; } /** * Returns the platform specific file name format for the shared library * named by the argument. * * @param userLibName * the name of the library to look up. * @return the platform specific filename for the library */ public static native String mapLibraryName(String userLibName); /** * Sets the value of the named static field in the receiver to the passed in * argument. * * @param fieldName * the name of the field to set, one of in, out, or err * @param stream * the new value of the field */ private static native void setFieldImpl(String fieldName, Object stream); }
apache/hive
35,550
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AllocateTableWriteIdsRequest.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class AllocateTableWriteIdsRequest implements org.apache.thrift.TBase<AllocateTableWriteIdsRequest, AllocateTableWriteIdsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<AllocateTableWriteIdsRequest> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AllocateTableWriteIdsRequest"); private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField TXN_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("txnIds", org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField REPL_POLICY_FIELD_DESC = new org.apache.thrift.protocol.TField("replPolicy", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField SRC_TXN_TO_WRITE_ID_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("srcTxnToWriteIdList", org.apache.thrift.protocol.TType.LIST, (short)5); private static final org.apache.thrift.protocol.TField REALLOCATE_FIELD_DESC = new org.apache.thrift.protocol.TField("reallocate", org.apache.thrift.protocol.TType.BOOL, (short)6); private static final org.apache.thrift.scheme.SchemeFactory 
STANDARD_SCHEME_FACTORY = new AllocateTableWriteIdsRequestStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new AllocateTableWriteIdsRequestTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // required private @org.apache.thrift.annotation.Nullable java.lang.String tableName; // required private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Long> txnIds; // optional private @org.apache.thrift.annotation.Nullable java.lang.String replPolicy; // optional private @org.apache.thrift.annotation.Nullable java.util.List<TxnToWriteId> srcTxnToWriteIdList; // optional private boolean reallocate; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DB_NAME((short)1, "dbName"), TABLE_NAME((short)2, "tableName"), TXN_IDS((short)3, "txnIds"), REPL_POLICY((short)4, "replPolicy"), SRC_TXN_TO_WRITE_ID_LIST((short)5, "srcTxnToWriteIdList"), REALLOCATE((short)6, "reallocate"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DB_NAME return DB_NAME; case 2: // TABLE_NAME return TABLE_NAME; case 3: // TXN_IDS return TXN_IDS; case 4: // REPL_POLICY return REPL_POLICY; case 5: // SRC_TXN_TO_WRITE_ID_LIST return SRC_TXN_TO_WRITE_ID_LIST; case 6: // REALLOCATE return REALLOCATE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. 
*/ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __REALLOCATE_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.TXN_IDS,_Fields.REPL_POLICY,_Fields.SRC_TXN_TO_WRITE_ID_LIST,_Fields.REALLOCATE}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.TXN_IDS, new org.apache.thrift.meta_data.FieldMetaData("txnIds", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)))); tmpMap.put(_Fields.REPL_POLICY, new org.apache.thrift.meta_data.FieldMetaData("replPolicy", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.SRC_TXN_TO_WRITE_ID_LIST, new org.apache.thrift.meta_data.FieldMetaData("srcTxnToWriteIdList", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TxnToWriteId.class)))); tmpMap.put(_Fields.REALLOCATE, new org.apache.thrift.meta_data.FieldMetaData("reallocate", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AllocateTableWriteIdsRequest.class, metaDataMap); } public AllocateTableWriteIdsRequest() { this.reallocate = false; } public AllocateTableWriteIdsRequest( java.lang.String dbName, java.lang.String tableName) { this(); this.dbName = dbName; this.tableName = tableName; } /** * Performs a deep copy on <i>other</i>. 
*/ public AllocateTableWriteIdsRequest(AllocateTableWriteIdsRequest other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetDbName()) { this.dbName = other.dbName; } if (other.isSetTableName()) { this.tableName = other.tableName; } if (other.isSetTxnIds()) { java.util.List<java.lang.Long> __this__txnIds = new java.util.ArrayList<java.lang.Long>(other.txnIds); this.txnIds = __this__txnIds; } if (other.isSetReplPolicy()) { this.replPolicy = other.replPolicy; } if (other.isSetSrcTxnToWriteIdList()) { java.util.List<TxnToWriteId> __this__srcTxnToWriteIdList = new java.util.ArrayList<TxnToWriteId>(other.srcTxnToWriteIdList.size()); for (TxnToWriteId other_element : other.srcTxnToWriteIdList) { __this__srcTxnToWriteIdList.add(new TxnToWriteId(other_element)); } this.srcTxnToWriteIdList = __this__srcTxnToWriteIdList; } this.reallocate = other.reallocate; } public AllocateTableWriteIdsRequest deepCopy() { return new AllocateTableWriteIdsRequest(this); } @Override public void clear() { this.dbName = null; this.tableName = null; this.txnIds = null; this.replPolicy = null; this.srcTxnToWriteIdList = null; this.reallocate = false; } @org.apache.thrift.annotation.Nullable public java.lang.String getDbName() { return this.dbName; } public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) { this.dbName = dbName; } public void unsetDbName() { this.dbName = null; } /** Returns true if field dbName is set (has been assigned a value) and false otherwise */ public boolean isSetDbName() { return this.dbName != null; } public void setDbNameIsSet(boolean value) { if (!value) { this.dbName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getTableName() { return this.tableName; } public void setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) { this.tableName = tableName; } public void unsetTableName() { this.tableName = null; } /** Returns true if field tableName is set (has been assigned a 
value) and false otherwise */ public boolean isSetTableName() { return this.tableName != null; } public void setTableNameIsSet(boolean value) { if (!value) { this.tableName = null; } } public int getTxnIdsSize() { return (this.txnIds == null) ? 0 : this.txnIds.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.lang.Long> getTxnIdsIterator() { return (this.txnIds == null) ? null : this.txnIds.iterator(); } public void addToTxnIds(long elem) { if (this.txnIds == null) { this.txnIds = new java.util.ArrayList<java.lang.Long>(); } this.txnIds.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.lang.Long> getTxnIds() { return this.txnIds; } public void setTxnIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Long> txnIds) { this.txnIds = txnIds; } public void unsetTxnIds() { this.txnIds = null; } /** Returns true if field txnIds is set (has been assigned a value) and false otherwise */ public boolean isSetTxnIds() { return this.txnIds != null; } public void setTxnIdsIsSet(boolean value) { if (!value) { this.txnIds = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getReplPolicy() { return this.replPolicy; } public void setReplPolicy(@org.apache.thrift.annotation.Nullable java.lang.String replPolicy) { this.replPolicy = replPolicy; } public void unsetReplPolicy() { this.replPolicy = null; } /** Returns true if field replPolicy is set (has been assigned a value) and false otherwise */ public boolean isSetReplPolicy() { return this.replPolicy != null; } public void setReplPolicyIsSet(boolean value) { if (!value) { this.replPolicy = null; } } public int getSrcTxnToWriteIdListSize() { return (this.srcTxnToWriteIdList == null) ? 0 : this.srcTxnToWriteIdList.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<TxnToWriteId> getSrcTxnToWriteIdListIterator() { return (this.srcTxnToWriteIdList == null) ? 
null : this.srcTxnToWriteIdList.iterator(); } public void addToSrcTxnToWriteIdList(TxnToWriteId elem) { if (this.srcTxnToWriteIdList == null) { this.srcTxnToWriteIdList = new java.util.ArrayList<TxnToWriteId>(); } this.srcTxnToWriteIdList.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<TxnToWriteId> getSrcTxnToWriteIdList() { return this.srcTxnToWriteIdList; } public void setSrcTxnToWriteIdList(@org.apache.thrift.annotation.Nullable java.util.List<TxnToWriteId> srcTxnToWriteIdList) { this.srcTxnToWriteIdList = srcTxnToWriteIdList; } public void unsetSrcTxnToWriteIdList() { this.srcTxnToWriteIdList = null; } /** Returns true if field srcTxnToWriteIdList is set (has been assigned a value) and false otherwise */ public boolean isSetSrcTxnToWriteIdList() { return this.srcTxnToWriteIdList != null; } public void setSrcTxnToWriteIdListIsSet(boolean value) { if (!value) { this.srcTxnToWriteIdList = null; } } public boolean isReallocate() { return this.reallocate; } public void setReallocate(boolean reallocate) { this.reallocate = reallocate; setReallocateIsSet(true); } public void unsetReallocate() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __REALLOCATE_ISSET_ID); } /** Returns true if field reallocate is set (has been assigned a value) and false otherwise */ public boolean isSetReallocate() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __REALLOCATE_ISSET_ID); } public void setReallocateIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __REALLOCATE_ISSET_ID, value); } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case DB_NAME: if (value == null) { unsetDbName(); } else { setDbName((java.lang.String)value); } break; case TABLE_NAME: if (value == null) { unsetTableName(); } else { setTableName((java.lang.String)value); } break; case TXN_IDS: if (value == null) { 
unsetTxnIds(); } else { setTxnIds((java.util.List<java.lang.Long>)value); } break; case REPL_POLICY: if (value == null) { unsetReplPolicy(); } else { setReplPolicy((java.lang.String)value); } break; case SRC_TXN_TO_WRITE_ID_LIST: if (value == null) { unsetSrcTxnToWriteIdList(); } else { setSrcTxnToWriteIdList((java.util.List<TxnToWriteId>)value); } break; case REALLOCATE: if (value == null) { unsetReallocate(); } else { setReallocate((java.lang.Boolean)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case DB_NAME: return getDbName(); case TABLE_NAME: return getTableName(); case TXN_IDS: return getTxnIds(); case REPL_POLICY: return getReplPolicy(); case SRC_TXN_TO_WRITE_ID_LIST: return getSrcTxnToWriteIdList(); case REALLOCATE: return isReallocate(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case DB_NAME: return isSetDbName(); case TABLE_NAME: return isSetTableName(); case TXN_IDS: return isSetTxnIds(); case REPL_POLICY: return isSetReplPolicy(); case SRC_TXN_TO_WRITE_ID_LIST: return isSetSrcTxnToWriteIdList(); case REALLOCATE: return isSetReallocate(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof AllocateTableWriteIdsRequest) return this.equals((AllocateTableWriteIdsRequest)that); return false; } public boolean equals(AllocateTableWriteIdsRequest that) { if (that == null) return false; if (this == that) return true; boolean this_present_dbName = true && this.isSetDbName(); boolean that_present_dbName = true && that.isSetDbName(); if (this_present_dbName || that_present_dbName) { if (!(this_present_dbName && that_present_dbName)) return false; if 
(!this.dbName.equals(that.dbName)) return false; } boolean this_present_tableName = true && this.isSetTableName(); boolean that_present_tableName = true && that.isSetTableName(); if (this_present_tableName || that_present_tableName) { if (!(this_present_tableName && that_present_tableName)) return false; if (!this.tableName.equals(that.tableName)) return false; } boolean this_present_txnIds = true && this.isSetTxnIds(); boolean that_present_txnIds = true && that.isSetTxnIds(); if (this_present_txnIds || that_present_txnIds) { if (!(this_present_txnIds && that_present_txnIds)) return false; if (!this.txnIds.equals(that.txnIds)) return false; } boolean this_present_replPolicy = true && this.isSetReplPolicy(); boolean that_present_replPolicy = true && that.isSetReplPolicy(); if (this_present_replPolicy || that_present_replPolicy) { if (!(this_present_replPolicy && that_present_replPolicy)) return false; if (!this.replPolicy.equals(that.replPolicy)) return false; } boolean this_present_srcTxnToWriteIdList = true && this.isSetSrcTxnToWriteIdList(); boolean that_present_srcTxnToWriteIdList = true && that.isSetSrcTxnToWriteIdList(); if (this_present_srcTxnToWriteIdList || that_present_srcTxnToWriteIdList) { if (!(this_present_srcTxnToWriteIdList && that_present_srcTxnToWriteIdList)) return false; if (!this.srcTxnToWriteIdList.equals(that.srcTxnToWriteIdList)) return false; } boolean this_present_reallocate = true && this.isSetReallocate(); boolean that_present_reallocate = true && that.isSetReallocate(); if (this_present_reallocate || that_present_reallocate) { if (!(this_present_reallocate && that_present_reallocate)) return false; if (this.reallocate != that.reallocate) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetDbName()) ? 131071 : 524287); if (isSetDbName()) hashCode = hashCode * 8191 + dbName.hashCode(); hashCode = hashCode * 8191 + ((isSetTableName()) ? 
131071 : 524287); if (isSetTableName()) hashCode = hashCode * 8191 + tableName.hashCode(); hashCode = hashCode * 8191 + ((isSetTxnIds()) ? 131071 : 524287); if (isSetTxnIds()) hashCode = hashCode * 8191 + txnIds.hashCode(); hashCode = hashCode * 8191 + ((isSetReplPolicy()) ? 131071 : 524287); if (isSetReplPolicy()) hashCode = hashCode * 8191 + replPolicy.hashCode(); hashCode = hashCode * 8191 + ((isSetSrcTxnToWriteIdList()) ? 131071 : 524287); if (isSetSrcTxnToWriteIdList()) hashCode = hashCode * 8191 + srcTxnToWriteIdList.hashCode(); hashCode = hashCode * 8191 + ((isSetReallocate()) ? 131071 : 524287); if (isSetReallocate()) hashCode = hashCode * 8191 + ((reallocate) ? 131071 : 524287); return hashCode; } @Override public int compareTo(AllocateTableWriteIdsRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName()); if (lastComparison != 0) { return lastComparison; } if (isSetDbName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTableName(), other.isSetTableName()); if (lastComparison != 0) { return lastComparison; } if (isSetTableName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTxnIds(), other.isSetTxnIds()); if (lastComparison != 0) { return lastComparison; } if (isSetTxnIds()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.txnIds, other.txnIds); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetReplPolicy(), other.isSetReplPolicy()); if (lastComparison != 0) { return lastComparison; } if (isSetReplPolicy()) { lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.replPolicy, other.replPolicy); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetSrcTxnToWriteIdList(), other.isSetSrcTxnToWriteIdList()); if (lastComparison != 0) { return lastComparison; } if (isSetSrcTxnToWriteIdList()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.srcTxnToWriteIdList, other.srcTxnToWriteIdList); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetReallocate(), other.isSetReallocate()); if (lastComparison != 0) { return lastComparison; } if (isSetReallocate()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reallocate, other.reallocate); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("AllocateTableWriteIdsRequest("); boolean first = true; sb.append("dbName:"); if (this.dbName == null) { sb.append("null"); } else { sb.append(this.dbName); } first = false; if (!first) sb.append(", "); sb.append("tableName:"); if (this.tableName == null) { sb.append("null"); } else { sb.append(this.tableName); } first = false; if (isSetTxnIds()) { if (!first) sb.append(", "); sb.append("txnIds:"); if (this.txnIds == null) { sb.append("null"); } else { sb.append(this.txnIds); } first = false; } if (isSetReplPolicy()) { if (!first) sb.append(", "); sb.append("replPolicy:"); if (this.replPolicy == null) { sb.append("null"); } else { sb.append(this.replPolicy); } first = false; } if 
(isSetSrcTxnToWriteIdList()) { if (!first) sb.append(", "); sb.append("srcTxnToWriteIdList:"); if (this.srcTxnToWriteIdList == null) { sb.append("null"); } else { sb.append(this.srcTxnToWriteIdList); } first = false; } if (isSetReallocate()) { if (!first) sb.append(", "); sb.append("reallocate:"); sb.append(this.reallocate); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetDbName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'dbName' is unset! Struct:" + toString()); } if (!isSetTableName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'tableName' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class AllocateTableWriteIdsRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public AllocateTableWriteIdsRequestStandardScheme getScheme() { return new AllocateTableWriteIdsRequestStandardScheme(); } } private static class AllocateTableWriteIdsRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<AllocateTableWriteIdsRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, AllocateTableWriteIdsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DB_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // TABLE_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.tableName = iprot.readString(); struct.setTableNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // TXN_IDS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list868 = iprot.readListBegin(); struct.txnIds = new java.util.ArrayList<java.lang.Long>(_list868.size); long _elem869; for (int _i870 = 0; _i870 < _list868.size; ++_i870) { _elem869 = iprot.readI64(); struct.txnIds.add(_elem869); } iprot.readListEnd(); } struct.setTxnIdsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // REPL_POLICY if (schemeField.type 
== org.apache.thrift.protocol.TType.STRING) { struct.replPolicy = iprot.readString(); struct.setReplPolicyIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // SRC_TXN_TO_WRITE_ID_LIST if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list871 = iprot.readListBegin(); struct.srcTxnToWriteIdList = new java.util.ArrayList<TxnToWriteId>(_list871.size); @org.apache.thrift.annotation.Nullable TxnToWriteId _elem872; for (int _i873 = 0; _i873 < _list871.size; ++_i873) { _elem872 = new TxnToWriteId(); _elem872.read(iprot); struct.srcTxnToWriteIdList.add(_elem872); } iprot.readListEnd(); } struct.setSrcTxnToWriteIdListIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // REALLOCATE if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { struct.reallocate = iprot.readBool(); struct.setReallocateIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, AllocateTableWriteIdsRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.dbName != null) { oprot.writeFieldBegin(DB_NAME_FIELD_DESC); oprot.writeString(struct.dbName); oprot.writeFieldEnd(); } if (struct.tableName != null) { oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC); oprot.writeString(struct.tableName); oprot.writeFieldEnd(); } if (struct.txnIds != null) { if (struct.isSetTxnIds()) { oprot.writeFieldBegin(TXN_IDS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.txnIds.size())); for (long _iter874 : struct.txnIds) { oprot.writeI64(_iter874); } 
oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.replPolicy != null) { if (struct.isSetReplPolicy()) { oprot.writeFieldBegin(REPL_POLICY_FIELD_DESC); oprot.writeString(struct.replPolicy); oprot.writeFieldEnd(); } } if (struct.srcTxnToWriteIdList != null) { if (struct.isSetSrcTxnToWriteIdList()) { oprot.writeFieldBegin(SRC_TXN_TO_WRITE_ID_LIST_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.srcTxnToWriteIdList.size())); for (TxnToWriteId _iter875 : struct.srcTxnToWriteIdList) { _iter875.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.isSetReallocate()) { oprot.writeFieldBegin(REALLOCATE_FIELD_DESC); oprot.writeBool(struct.reallocate); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class AllocateTableWriteIdsRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public AllocateTableWriteIdsRequestTupleScheme getScheme() { return new AllocateTableWriteIdsRequestTupleScheme(); } } private static class AllocateTableWriteIdsRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<AllocateTableWriteIdsRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, AllocateTableWriteIdsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; oprot.writeString(struct.dbName); oprot.writeString(struct.tableName); java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetTxnIds()) { optionals.set(0); } if (struct.isSetReplPolicy()) { optionals.set(1); } if (struct.isSetSrcTxnToWriteIdList()) { optionals.set(2); } if (struct.isSetReallocate()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.isSetTxnIds()) { { oprot.writeI32(struct.txnIds.size()); for (long _iter876 : struct.txnIds) { oprot.writeI64(_iter876); } } } if (struct.isSetReplPolicy()) { 
oprot.writeString(struct.replPolicy); } if (struct.isSetSrcTxnToWriteIdList()) { { oprot.writeI32(struct.srcTxnToWriteIdList.size()); for (TxnToWriteId _iter877 : struct.srcTxnToWriteIdList) { _iter877.write(oprot); } } } if (struct.isSetReallocate()) { oprot.writeBool(struct.reallocate); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, AllocateTableWriteIdsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); struct.tableName = iprot.readString(); struct.setTableNameIsSet(true); java.util.BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list878 = iprot.readListBegin(org.apache.thrift.protocol.TType.I64); struct.txnIds = new java.util.ArrayList<java.lang.Long>(_list878.size); long _elem879; for (int _i880 = 0; _i880 < _list878.size; ++_i880) { _elem879 = iprot.readI64(); struct.txnIds.add(_elem879); } } struct.setTxnIdsIsSet(true); } if (incoming.get(1)) { struct.replPolicy = iprot.readString(); struct.setReplPolicyIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TList _list881 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); struct.srcTxnToWriteIdList = new java.util.ArrayList<TxnToWriteId>(_list881.size); @org.apache.thrift.annotation.Nullable TxnToWriteId _elem882; for (int _i883 = 0; _i883 < _list881.size; ++_i883) { _elem882 = new TxnToWriteId(); _elem882.read(iprot); struct.srcTxnToWriteIdList.add(_elem882); } } struct.setSrcTxnToWriteIdListIsSet(true); } if (incoming.get(3)) { struct.reallocate = iprot.readBool(); struct.setReallocateIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? 
STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
googleads/google-ads-java
35,656
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/LookalikeUserListInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/common/user_lists.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.common; /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v19.common.LookalikeUserListInfo} */ public final class LookalikeUserListInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.LookalikeUserListInfo) LookalikeUserListInfoOrBuilder { private static final long serialVersionUID = 0L; // Use LookalikeUserListInfo.newBuilder() to construct. private LookalikeUserListInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LookalikeUserListInfo() { seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LookalikeUserListInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.UserListsProto.internal_static_google_ads_googleads_v19_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.UserListsProto.internal_static_google_ads_googleads_v19_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.LookalikeUserListInfo.class, com.google.ads.googleads.v19.common.LookalikeUserListInfo.Builder.class); } public static final int SEED_USER_LIST_IDS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.Internal.LongList seedUserListIds_ = 
emptyLongList(); /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return A list containing the seedUserListIds. */ @java.lang.Override public java.util.List<java.lang.Long> getSeedUserListIdsList() { return seedUserListIds_; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return The count of seedUserListIds. */ public int getSeedUserListIdsCount() { return seedUserListIds_.size(); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index of the element to return. * @return The seedUserListIds at the given index. */ public long getSeedUserListIds(int index) { return seedUserListIds_.getLong(index); } private int seedUserListIdsMemoizedSerializedSize = -1; public static final int EXPANSION_LEVEL_FIELD_NUMBER = 2; private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } public static final int COUNTRY_CODES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. */ public com.google.protobuf.ProtocolStringList getCountryCodesList() { return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. 
*/ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getSeedUserListIdsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(seedUserListIdsMemoizedSerializedSize); } for (int i = 0; i < seedUserListIds_.size(); i++) { output.writeInt64NoTag(seedUserListIds_.getLong(i)); } if (expansionLevel_ != com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { output.writeEnum(2, expansionLevel_); } for (int i = 0; i < countryCodes_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, countryCodes_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < seedUserListIds_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeInt64SizeNoTag(seedUserListIds_.getLong(i)); } size += dataSize; if (!getSeedUserListIdsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } seedUserListIdsMemoizedSerializedSize = dataSize; } if (expansionLevel_ != com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, expansionLevel_); } { int dataSize = 0; for (int i = 0; i < countryCodes_.size(); i++) { dataSize += computeStringSizeNoTag(countryCodes_.getRaw(i)); } size += dataSize; size += 1 
* getCountryCodesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.common.LookalikeUserListInfo)) { return super.equals(obj); } com.google.ads.googleads.v19.common.LookalikeUserListInfo other = (com.google.ads.googleads.v19.common.LookalikeUserListInfo) obj; if (!getSeedUserListIdsList() .equals(other.getSeedUserListIdsList())) return false; if (expansionLevel_ != other.expansionLevel_) return false; if (!getCountryCodesList() .equals(other.getCountryCodesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSeedUserListIdsCount() > 0) { hash = (37 * hash) + SEED_USER_LIST_IDS_FIELD_NUMBER; hash = (53 * hash) + getSeedUserListIdsList().hashCode(); } hash = (37 * hash) + EXPANSION_LEVEL_FIELD_NUMBER; hash = (53 * hash) + expansionLevel_; if (getCountryCodesCount() > 0) { hash = (37 * hash) + COUNTRY_CODES_FIELD_NUMBER; hash = (53 * hash) + getCountryCodesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.common.LookalikeUserListInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v19.common.LookalikeUserListInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.LookalikeUserListInfo) com.google.ads.googleads.v19.common.LookalikeUserListInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.UserListsProto.internal_static_google_ads_googleads_v19_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.UserListsProto.internal_static_google_ads_googleads_v19_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.LookalikeUserListInfo.class, com.google.ads.googleads.v19.common.LookalikeUserListInfo.Builder.class); } // Construct using com.google.ads.googleads.v19.common.LookalikeUserListInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.common.UserListsProto.internal_static_google_ads_googleads_v19_common_LookalikeUserListInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.common.LookalikeUserListInfo getDefaultInstanceForType() { return com.google.ads.googleads.v19.common.LookalikeUserListInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.common.LookalikeUserListInfo build() { com.google.ads.googleads.v19.common.LookalikeUserListInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.common.LookalikeUserListInfo buildPartial() { com.google.ads.googleads.v19.common.LookalikeUserListInfo result = new com.google.ads.googleads.v19.common.LookalikeUserListInfo(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.common.LookalikeUserListInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { 
seedUserListIds_.makeImmutable(); result.seedUserListIds_ = seedUserListIds_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.expansionLevel_ = expansionLevel_; } if (((from_bitField0_ & 0x00000004) != 0)) { countryCodes_.makeImmutable(); result.countryCodes_ = countryCodes_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.common.LookalikeUserListInfo) { return mergeFrom((com.google.ads.googleads.v19.common.LookalikeUserListInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.common.LookalikeUserListInfo other) { if (other == com.google.ads.googleads.v19.common.LookalikeUserListInfo.getDefaultInstance()) return this; if (!other.seedUserListIds_.isEmpty()) { if (seedUserListIds_.isEmpty()) { seedUserListIds_ = other.seedUserListIds_; seedUserListIds_.makeImmutable(); bitField0_ |= 0x00000001; } else { ensureSeedUserListIdsIsMutable(); seedUserListIds_.addAll(other.seedUserListIds_); } onChanged(); } if (other.expansionLevel_ != 0) { 
setExpansionLevelValue(other.getExpansionLevelValue()); } if (!other.countryCodes_.isEmpty()) { if (countryCodes_.isEmpty()) { countryCodes_ = other.countryCodes_; bitField0_ |= 0x00000004; } else { ensureCountryCodesIsMutable(); countryCodes_.addAll(other.countryCodes_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { long v = input.readInt64(); ensureSeedUserListIdsIsMutable(); seedUserListIds_.addLong(v); break; } // case 8 case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); ensureSeedUserListIdsIsMutable(); while (input.getBytesUntilLimit() > 0) { seedUserListIds_.addLong(input.readInt64()); } input.popLimit(limit); break; } // case 10 case 16: { expansionLevel_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { java.lang.String s = input.readStringRequireUtf8(); ensureCountryCodesIsMutable(); countryCodes_.add(s); break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Internal.LongList seedUserListIds_ = emptyLongList(); private void ensureSeedUserListIdsIsMutable() { if (!seedUserListIds_.isModifiable()) { seedUserListIds_ = makeMutableCopy(seedUserListIds_); } bitField0_ |= 
0x00000001; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return A list containing the seedUserListIds. */ public java.util.List<java.lang.Long> getSeedUserListIdsList() { seedUserListIds_.makeImmutable(); return seedUserListIds_; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return The count of seedUserListIds. */ public int getSeedUserListIdsCount() { return seedUserListIds_.size(); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index of the element to return. * @return The seedUserListIds at the given index. */ public long getSeedUserListIds(int index) { return seedUserListIds_.getLong(index); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index to set the value at. * @param value The seedUserListIds to set. * @return This builder for chaining. */ public Builder setSeedUserListIds( int index, long value) { ensureSeedUserListIdsIsMutable(); seedUserListIds_.setLong(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param value The seedUserListIds to add. * @return This builder for chaining. */ public Builder addSeedUserListIds(long value) { ensureSeedUserListIdsIsMutable(); seedUserListIds_.addLong(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param values The seedUserListIds to add. 
* @return This builder for chaining. */ public Builder addAllSeedUserListIds( java.lang.Iterable<? extends java.lang.Long> values) { ensureSeedUserListIdsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, seedUserListIds_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return This builder for chaining. */ public Builder clearSeedUserListIds() { seedUserListIds_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The enum numeric value on the wire for expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevelValue(int value) { expansionLevel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevel(com.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; expansionLevel_ = value.getNumber(); onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v19.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return This builder for chaining. */ public Builder clearExpansionLevel() { bitField0_ = (bitField0_ & ~0x00000002); expansionLevel_ = 0; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureCountryCodesIsMutable() { if (!countryCodes_.isModifiable()) { countryCodes_ = new com.google.protobuf.LazyStringArrayList(countryCodes_); } bitField0_ |= 0x00000004; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. 
*/ public com.google.protobuf.ProtocolStringList getCountryCodesList() { countryCodes_.makeImmutable(); return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. */ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index to set the value at. * @param value The countryCodes to set. * @return This builder for chaining. */ public Builder setCountryCodes( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.set(index, value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodes( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param values The countryCodes to add. * @return This builder for chaining. */ public Builder addAllCountryCodes( java.lang.Iterable<java.lang.String> values) { ensureCountryCodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, countryCodes_); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return This builder for chaining. */ public Builder clearCountryCodes() { countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000004);; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The bytes of the countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodesBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.LookalikeUserListInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.LookalikeUserListInfo) private static final com.google.ads.googleads.v19.common.LookalikeUserListInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.LookalikeUserListInfo(); } public static com.google.ads.googleads.v19.common.LookalikeUserListInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<LookalikeUserListInfo> PARSER = new com.google.protobuf.AbstractParser<LookalikeUserListInfo>() { @java.lang.Override public LookalikeUserListInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); 
} return builder.buildPartial(); } }; public static com.google.protobuf.Parser<LookalikeUserListInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<LookalikeUserListInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.common.LookalikeUserListInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,656
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/common/LookalikeUserListInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/common/user_lists.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.common; /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v20.common.LookalikeUserListInfo} */ public final class LookalikeUserListInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.LookalikeUserListInfo) LookalikeUserListInfoOrBuilder { private static final long serialVersionUID = 0L; // Use LookalikeUserListInfo.newBuilder() to construct. private LookalikeUserListInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LookalikeUserListInfo() { seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LookalikeUserListInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.UserListsProto.internal_static_google_ads_googleads_v20_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.UserListsProto.internal_static_google_ads_googleads_v20_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.LookalikeUserListInfo.class, com.google.ads.googleads.v20.common.LookalikeUserListInfo.Builder.class); } public static final int SEED_USER_LIST_IDS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.Internal.LongList seedUserListIds_ = 
emptyLongList(); /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return A list containing the seedUserListIds. */ @java.lang.Override public java.util.List<java.lang.Long> getSeedUserListIdsList() { return seedUserListIds_; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return The count of seedUserListIds. */ public int getSeedUserListIdsCount() { return seedUserListIds_.size(); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index of the element to return. * @return The seedUserListIds at the given index. */ public long getSeedUserListIds(int index) { return seedUserListIds_.getLong(index); } private int seedUserListIdsMemoizedSerializedSize = -1; public static final int EXPANSION_LEVEL_FIELD_NUMBER = 2; private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } public static final int COUNTRY_CODES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. */ public com.google.protobuf.ProtocolStringList getCountryCodesList() { return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. 
*/ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getSeedUserListIdsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(seedUserListIdsMemoizedSerializedSize); } for (int i = 0; i < seedUserListIds_.size(); i++) { output.writeInt64NoTag(seedUserListIds_.getLong(i)); } if (expansionLevel_ != com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { output.writeEnum(2, expansionLevel_); } for (int i = 0; i < countryCodes_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, countryCodes_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < seedUserListIds_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeInt64SizeNoTag(seedUserListIds_.getLong(i)); } size += dataSize; if (!getSeedUserListIdsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } seedUserListIdsMemoizedSerializedSize = dataSize; } if (expansionLevel_ != com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, expansionLevel_); } { int dataSize = 0; for (int i = 0; i < countryCodes_.size(); i++) { dataSize += computeStringSizeNoTag(countryCodes_.getRaw(i)); } size += dataSize; size += 1 
* getCountryCodesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.common.LookalikeUserListInfo)) { return super.equals(obj); } com.google.ads.googleads.v20.common.LookalikeUserListInfo other = (com.google.ads.googleads.v20.common.LookalikeUserListInfo) obj; if (!getSeedUserListIdsList() .equals(other.getSeedUserListIdsList())) return false; if (expansionLevel_ != other.expansionLevel_) return false; if (!getCountryCodesList() .equals(other.getCountryCodesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSeedUserListIdsCount() > 0) { hash = (37 * hash) + SEED_USER_LIST_IDS_FIELD_NUMBER; hash = (53 * hash) + getSeedUserListIdsList().hashCode(); } hash = (37 * hash) + EXPANSION_LEVEL_FIELD_NUMBER; hash = (53 * hash) + expansionLevel_; if (getCountryCodesCount() > 0) { hash = (37 * hash) + COUNTRY_CODES_FIELD_NUMBER; hash = (53 * hash) + getCountryCodesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.common.LookalikeUserListInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v20.common.LookalikeUserListInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.LookalikeUserListInfo) com.google.ads.googleads.v20.common.LookalikeUserListInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.UserListsProto.internal_static_google_ads_googleads_v20_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.UserListsProto.internal_static_google_ads_googleads_v20_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.LookalikeUserListInfo.class, com.google.ads.googleads.v20.common.LookalikeUserListInfo.Builder.class); } // Construct using com.google.ads.googleads.v20.common.LookalikeUserListInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.common.UserListsProto.internal_static_google_ads_googleads_v20_common_LookalikeUserListInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.common.LookalikeUserListInfo getDefaultInstanceForType() { return com.google.ads.googleads.v20.common.LookalikeUserListInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.common.LookalikeUserListInfo build() { com.google.ads.googleads.v20.common.LookalikeUserListInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.common.LookalikeUserListInfo buildPartial() { com.google.ads.googleads.v20.common.LookalikeUserListInfo result = new com.google.ads.googleads.v20.common.LookalikeUserListInfo(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.common.LookalikeUserListInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { 
seedUserListIds_.makeImmutable(); result.seedUserListIds_ = seedUserListIds_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.expansionLevel_ = expansionLevel_; } if (((from_bitField0_ & 0x00000004) != 0)) { countryCodes_.makeImmutable(); result.countryCodes_ = countryCodes_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.common.LookalikeUserListInfo) { return mergeFrom((com.google.ads.googleads.v20.common.LookalikeUserListInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.common.LookalikeUserListInfo other) { if (other == com.google.ads.googleads.v20.common.LookalikeUserListInfo.getDefaultInstance()) return this; if (!other.seedUserListIds_.isEmpty()) { if (seedUserListIds_.isEmpty()) { seedUserListIds_ = other.seedUserListIds_; seedUserListIds_.makeImmutable(); bitField0_ |= 0x00000001; } else { ensureSeedUserListIdsIsMutable(); seedUserListIds_.addAll(other.seedUserListIds_); } onChanged(); } if (other.expansionLevel_ != 0) { 
setExpansionLevelValue(other.getExpansionLevelValue()); } if (!other.countryCodes_.isEmpty()) { if (countryCodes_.isEmpty()) { countryCodes_ = other.countryCodes_; bitField0_ |= 0x00000004; } else { ensureCountryCodesIsMutable(); countryCodes_.addAll(other.countryCodes_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { long v = input.readInt64(); ensureSeedUserListIdsIsMutable(); seedUserListIds_.addLong(v); break; } // case 8 case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); ensureSeedUserListIdsIsMutable(); while (input.getBytesUntilLimit() > 0) { seedUserListIds_.addLong(input.readInt64()); } input.popLimit(limit); break; } // case 10 case 16: { expansionLevel_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { java.lang.String s = input.readStringRequireUtf8(); ensureCountryCodesIsMutable(); countryCodes_.add(s); break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Internal.LongList seedUserListIds_ = emptyLongList(); private void ensureSeedUserListIdsIsMutable() { if (!seedUserListIds_.isModifiable()) { seedUserListIds_ = makeMutableCopy(seedUserListIds_); } bitField0_ |= 
      0x00000001;
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @return A list containing the seedUserListIds.
     */
    public java.util.List<java.lang.Long>
        getSeedUserListIdsList() {
      // Freeze the backing list before exposing it so callers cannot mutate
      // the builder's internal state through the returned view.
      seedUserListIds_.makeImmutable();
      return seedUserListIds_;
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @return The count of seedUserListIds.
     */
    public int getSeedUserListIdsCount() {
      return seedUserListIds_.size();
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @param index The index of the element to return.
     * @return The seedUserListIds at the given index.
     */
    public long getSeedUserListIds(int index) {
      // Primitive accessor on the LongList avoids boxing the int64 value.
      return seedUserListIds_.getLong(index);
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @param index The index to set the value at.
     * @param value The seedUserListIds to set.
     * @return This builder for chaining.
     */
    public Builder setSeedUserListIds(
        int index, long value) {
      ensureSeedUserListIdsIsMutable();
      seedUserListIds_.setLong(index, value);
      // Mark field 1 as present so buildPartial0 copies it into the message.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @param value The seedUserListIds to add.
     * @return This builder for chaining.
     */
    public Builder addSeedUserListIds(long value) {
      ensureSeedUserListIdsIsMutable();
      seedUserListIds_.addLong(value);
      // Mark field 1 as present so buildPartial0 copies it into the message.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Seed UserList ID from which this list is derived, provided by user.
     * </pre>
     *
     * <code>repeated int64 seed_user_list_ids = 1;</code>
     * @param values The seedUserListIds to add.
* @return This builder for chaining. */ public Builder addAllSeedUserListIds( java.lang.Iterable<? extends java.lang.Long> values) { ensureSeedUserListIdsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, seedUserListIds_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return This builder for chaining. */ public Builder clearSeedUserListIds() { seedUserListIds_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The enum numeric value on the wire for expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevelValue(int value) { expansionLevel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevel(com.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; expansionLevel_ = value.getNumber(); onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v20.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return This builder for chaining. */ public Builder clearExpansionLevel() { bitField0_ = (bitField0_ & ~0x00000002); expansionLevel_ = 0; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureCountryCodesIsMutable() { if (!countryCodes_.isModifiable()) { countryCodes_ = new com.google.protobuf.LazyStringArrayList(countryCodes_); } bitField0_ |= 0x00000004; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. 
*/ public com.google.protobuf.ProtocolStringList getCountryCodesList() { countryCodes_.makeImmutable(); return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. */ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index to set the value at. * @param value The countryCodes to set. * @return This builder for chaining. */ public Builder setCountryCodes( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.set(index, value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodes( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param values The countryCodes to add. * @return This builder for chaining. */ public Builder addAllCountryCodes( java.lang.Iterable<java.lang.String> values) { ensureCountryCodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, countryCodes_); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return This builder for chaining. */ public Builder clearCountryCodes() { countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000004);; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The bytes of the countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodesBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.LookalikeUserListInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.LookalikeUserListInfo) private static final com.google.ads.googleads.v20.common.LookalikeUserListInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.LookalikeUserListInfo(); } public static com.google.ads.googleads.v20.common.LookalikeUserListInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<LookalikeUserListInfo> PARSER = new com.google.protobuf.AbstractParser<LookalikeUserListInfo>() { @java.lang.Override public LookalikeUserListInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); 
} return builder.buildPartial(); } }; public static com.google.protobuf.Parser<LookalikeUserListInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<LookalikeUserListInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.common.LookalikeUserListInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,656
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/LookalikeUserListInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/common/user_lists.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.common; /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.LookalikeUserListInfo} */ public final class LookalikeUserListInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.LookalikeUserListInfo) LookalikeUserListInfoOrBuilder { private static final long serialVersionUID = 0L; // Use LookalikeUserListInfo.newBuilder() to construct. private LookalikeUserListInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LookalikeUserListInfo() { seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LookalikeUserListInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.UserListsProto.internal_static_google_ads_googleads_v21_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.UserListsProto.internal_static_google_ads_googleads_v21_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.LookalikeUserListInfo.class, com.google.ads.googleads.v21.common.LookalikeUserListInfo.Builder.class); } public static final int SEED_USER_LIST_IDS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.Internal.LongList seedUserListIds_ = 
emptyLongList(); /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return A list containing the seedUserListIds. */ @java.lang.Override public java.util.List<java.lang.Long> getSeedUserListIdsList() { return seedUserListIds_; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return The count of seedUserListIds. */ public int getSeedUserListIdsCount() { return seedUserListIds_.size(); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index of the element to return. * @return The seedUserListIds at the given index. */ public long getSeedUserListIds(int index) { return seedUserListIds_.getLong(index); } private int seedUserListIdsMemoizedSerializedSize = -1; public static final int EXPANSION_LEVEL_FIELD_NUMBER = 2; private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } public static final int COUNTRY_CODES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. */ public com.google.protobuf.ProtocolStringList getCountryCodesList() { return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. 
*/ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getSeedUserListIdsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(seedUserListIdsMemoizedSerializedSize); } for (int i = 0; i < seedUserListIds_.size(); i++) { output.writeInt64NoTag(seedUserListIds_.getLong(i)); } if (expansionLevel_ != com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { output.writeEnum(2, expansionLevel_); } for (int i = 0; i < countryCodes_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, countryCodes_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < seedUserListIds_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeInt64SizeNoTag(seedUserListIds_.getLong(i)); } size += dataSize; if (!getSeedUserListIdsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } seedUserListIdsMemoizedSerializedSize = dataSize; } if (expansionLevel_ != com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, expansionLevel_); } { int dataSize = 0; for (int i = 0; i < countryCodes_.size(); i++) { dataSize += computeStringSizeNoTag(countryCodes_.getRaw(i)); } size += dataSize; size += 1 
* getCountryCodesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.common.LookalikeUserListInfo)) { return super.equals(obj); } com.google.ads.googleads.v21.common.LookalikeUserListInfo other = (com.google.ads.googleads.v21.common.LookalikeUserListInfo) obj; if (!getSeedUserListIdsList() .equals(other.getSeedUserListIdsList())) return false; if (expansionLevel_ != other.expansionLevel_) return false; if (!getCountryCodesList() .equals(other.getCountryCodesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSeedUserListIdsCount() > 0) { hash = (37 * hash) + SEED_USER_LIST_IDS_FIELD_NUMBER; hash = (53 * hash) + getSeedUserListIdsList().hashCode(); } hash = (37 * hash) + EXPANSION_LEVEL_FIELD_NUMBER; hash = (53 * hash) + expansionLevel_; if (getCountryCodesCount() > 0) { hash = (37 * hash) + COUNTRY_CODES_FIELD_NUMBER; hash = (53 * hash) + getCountryCodesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.common.LookalikeUserListInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * LookalikeUserlist, composed of users similar to those * of a configurable seed (set of UserLists) * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.LookalikeUserListInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.LookalikeUserListInfo) com.google.ads.googleads.v21.common.LookalikeUserListInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.UserListsProto.internal_static_google_ads_googleads_v21_common_LookalikeUserListInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.UserListsProto.internal_static_google_ads_googleads_v21_common_LookalikeUserListInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.LookalikeUserListInfo.class, com.google.ads.googleads.v21.common.LookalikeUserListInfo.Builder.class); } // Construct using com.google.ads.googleads.v21.common.LookalikeUserListInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; seedUserListIds_ = emptyLongList(); expansionLevel_ = 0; countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.common.UserListsProto.internal_static_google_ads_googleads_v21_common_LookalikeUserListInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.common.LookalikeUserListInfo getDefaultInstanceForType() { return com.google.ads.googleads.v21.common.LookalikeUserListInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.common.LookalikeUserListInfo build() { com.google.ads.googleads.v21.common.LookalikeUserListInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.common.LookalikeUserListInfo buildPartial() { com.google.ads.googleads.v21.common.LookalikeUserListInfo result = new com.google.ads.googleads.v21.common.LookalikeUserListInfo(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.common.LookalikeUserListInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { 
seedUserListIds_.makeImmutable(); result.seedUserListIds_ = seedUserListIds_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.expansionLevel_ = expansionLevel_; } if (((from_bitField0_ & 0x00000004) != 0)) { countryCodes_.makeImmutable(); result.countryCodes_ = countryCodes_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.common.LookalikeUserListInfo) { return mergeFrom((com.google.ads.googleads.v21.common.LookalikeUserListInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.common.LookalikeUserListInfo other) { if (other == com.google.ads.googleads.v21.common.LookalikeUserListInfo.getDefaultInstance()) return this; if (!other.seedUserListIds_.isEmpty()) { if (seedUserListIds_.isEmpty()) { seedUserListIds_ = other.seedUserListIds_; seedUserListIds_.makeImmutable(); bitField0_ |= 0x00000001; } else { ensureSeedUserListIdsIsMutable(); seedUserListIds_.addAll(other.seedUserListIds_); } onChanged(); } if (other.expansionLevel_ != 0) { 
setExpansionLevelValue(other.getExpansionLevelValue()); } if (!other.countryCodes_.isEmpty()) { if (countryCodes_.isEmpty()) { countryCodes_ = other.countryCodes_; bitField0_ |= 0x00000004; } else { ensureCountryCodesIsMutable(); countryCodes_.addAll(other.countryCodes_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { long v = input.readInt64(); ensureSeedUserListIdsIsMutable(); seedUserListIds_.addLong(v); break; } // case 8 case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); ensureSeedUserListIdsIsMutable(); while (input.getBytesUntilLimit() > 0) { seedUserListIds_.addLong(input.readInt64()); } input.popLimit(limit); break; } // case 10 case 16: { expansionLevel_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { java.lang.String s = input.readStringRequireUtf8(); ensureCountryCodesIsMutable(); countryCodes_.add(s); break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Internal.LongList seedUserListIds_ = emptyLongList(); private void ensureSeedUserListIdsIsMutable() { if (!seedUserListIds_.isModifiable()) { seedUserListIds_ = makeMutableCopy(seedUserListIds_); } bitField0_ |= 
0x00000001; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return A list containing the seedUserListIds. */ public java.util.List<java.lang.Long> getSeedUserListIdsList() { seedUserListIds_.makeImmutable(); return seedUserListIds_; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return The count of seedUserListIds. */ public int getSeedUserListIdsCount() { return seedUserListIds_.size(); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index of the element to return. * @return The seedUserListIds at the given index. */ public long getSeedUserListIds(int index) { return seedUserListIds_.getLong(index); } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param index The index to set the value at. * @param value The seedUserListIds to set. * @return This builder for chaining. */ public Builder setSeedUserListIds( int index, long value) { ensureSeedUserListIdsIsMutable(); seedUserListIds_.setLong(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param value The seedUserListIds to add. * @return This builder for chaining. */ public Builder addSeedUserListIds(long value) { ensureSeedUserListIdsIsMutable(); seedUserListIds_.addLong(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @param values The seedUserListIds to add. 
* @return This builder for chaining. */ public Builder addAllSeedUserListIds( java.lang.Iterable<? extends java.lang.Long> values) { ensureSeedUserListIdsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, seedUserListIds_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Seed UserList ID from which this list is derived, provided by user. * </pre> * * <code>repeated int64 seed_user_list_ids = 1;</code> * @return This builder for chaining. */ public Builder clearSeedUserListIds() { seedUserListIds_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private int expansionLevel_ = 0; /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The enum numeric value on the wire for expansionLevel. */ @java.lang.Override public int getExpansionLevelValue() { return expansionLevel_; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The enum numeric value on the wire for expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevelValue(int value) { expansionLevel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return The expansionLevel. 
*/ @java.lang.Override public com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel getExpansionLevel() { com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel result = com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.forNumber(expansionLevel_); return result == null ? com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel.UNRECOGNIZED : result; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @param value The expansionLevel to set. * @return This builder for chaining. */ public Builder setExpansionLevel(com.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; expansionLevel_ = value.getNumber(); onChanged(); return this; } /** * <pre> * Expansion level, reflecting the size of the lookalike audience * </pre> * * <code>.google.ads.googleads.v21.enums.LookalikeExpansionLevelEnum.LookalikeExpansionLevel expansion_level = 2;</code> * @return This builder for chaining. */ public Builder clearExpansionLevel() { bitField0_ = (bitField0_ & ~0x00000002); expansionLevel_ = 0; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureCountryCodesIsMutable() { if (!countryCodes_.isModifiable()) { countryCodes_ = new com.google.protobuf.LazyStringArrayList(countryCodes_); } bitField0_ |= 0x00000004; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return A list containing the countryCodes. 
*/ public com.google.protobuf.ProtocolStringList getCountryCodesList() { countryCodes_.makeImmutable(); return countryCodes_; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return The count of countryCodes. */ public int getCountryCodesCount() { return countryCodes_.size(); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the element to return. * @return The countryCodes at the given index. */ public java.lang.String getCountryCodes(int index) { return countryCodes_.get(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index of the value to return. * @return The bytes of the countryCodes at the given index. */ public com.google.protobuf.ByteString getCountryCodesBytes(int index) { return countryCodes_.getByteString(index); } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param index The index to set the value at. * @param value The countryCodes to set. * @return This builder for chaining. */ public Builder setCountryCodes( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.set(index, value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodes( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param values The countryCodes to add. * @return This builder for chaining. */ public Builder addAllCountryCodes( java.lang.Iterable<java.lang.String> values) { ensureCountryCodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, countryCodes_); bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @return This builder for chaining. */ public Builder clearCountryCodes() { countryCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000004);; onChanged(); return this; } /** * <pre> * Countries targeted by the Lookalike. Two-letter country code as defined by * ISO-3166 * </pre> * * <code>repeated string country_codes = 3;</code> * @param value The bytes of the countryCodes to add. * @return This builder for chaining. 
*/ public Builder addCountryCodesBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureCountryCodesIsMutable(); countryCodes_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.LookalikeUserListInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.LookalikeUserListInfo) private static final com.google.ads.googleads.v21.common.LookalikeUserListInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.LookalikeUserListInfo(); } public static com.google.ads.googleads.v21.common.LookalikeUserListInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<LookalikeUserListInfo> PARSER = new com.google.protobuf.AbstractParser<LookalikeUserListInfo>() { @java.lang.Override public LookalikeUserListInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); 
} return builder.buildPartial(); } }; public static com.google.protobuf.Parser<LookalikeUserListInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<LookalikeUserListInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.common.LookalikeUserListInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/oozie
35,589
core/src/main/java/org/apache/oozie/action/ssh/SshActionExecutor.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.action.ssh; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import org.apache.hadoop.util.StringUtils; import org.apache.oozie.ErrorCode; import org.apache.oozie.action.ActionExecutor; import org.apache.oozie.action.ActionExecutorException; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.WorkflowAction; import org.apache.oozie.client.WorkflowAction.Status; import org.apache.oozie.service.CallbackService; import org.apache.oozie.service.ConfigurationService; import org.apache.oozie.service.Services; import org.apache.oozie.servlet.CallbackServlet; import org.apache.oozie.util.BufferDrainer; import org.apache.oozie.util.IOUtils; import org.apache.oozie.util.PropertiesUtils; import org.apache.oozie.util.XLog; import org.apache.oozie.util.XmlUtils; import org.jdom2.Element; import org.jdom2.JDOMException; import org.jdom2.Namespace; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * Ssh action executor. 
<ul> <li>Execute the shell commands on the remote host</li> <li>Copies the base and wrapper * scripts on to the remote location</li> <li>Base script is used to run the command on the remote host</li> <li>Wrapper * script is used to check the status of the submitted command</li> <li>handles the submission failures</li> </ul> */ public class SshActionExecutor extends ActionExecutor { public static final String ACTION_TYPE = "ssh"; /** * Configuration parameter which specifies whether the specified ssh user is allowed, or has to be the job user. */ public static final String CONF_SSH_ALLOW_USER_AT_HOST = CONF_PREFIX + "ssh.allow.user.at.host"; protected static final String SSH_COMMAND_OPTIONS = "-o PasswordAuthentication=no -o KbdInteractiveDevices=no -o StrictHostKeyChecking=no -o ConnectTimeout=20 "; protected static final String SSH_COMMAND_BASE = "ssh " + SSH_COMMAND_OPTIONS; protected static final String SCP_COMMAND_BASE = "scp " + SSH_COMMAND_OPTIONS; public static final String ERR_SETUP_FAILED = "SETUP_FAILED"; public static final String ERR_EXECUTION_FAILED = "EXECUTION_FAILED"; public static final String ERR_UNKNOWN_ERROR = "UNKNOWN_ERROR"; public static final String ERR_COULD_NOT_CONNECT = "COULD_NOT_CONNECT"; public static final String ERR_HOST_RESOLUTION = "COULD_NOT_RESOLVE_HOST"; public static final String ERR_FNF = "FNF"; public static final String ERR_AUTH_FAILED = "AUTH_FAILED"; public static final String ERR_NO_EXEC_PERM = "NO_EXEC_PERM"; public static final String ERR_USER_MISMATCH = "ERR_USER_MISMATCH"; public static final String ERR_EXCEDE_LEN = "ERR_OUTPUT_EXCEED_MAX_LEN"; public static final String DELETE_TMP_DIR = "oozie.action.ssh.delete.remote.tmp.dir"; public static final String HTTP_COMMAND = "oozie.action.ssh.http.command"; public static final String HTTP_COMMAND_OPTIONS = "oozie.action.ssh.http.command.post.options"; public static final String CHECK_MAX_RETRIES = "oozie.action.ssh.check.retries.max"; public static final String 
CHECK_INITIAL_RETRY_WAIT_TIME = "oozie.action.ssh.check.initial.retry.wait.time"; private static final String EXT_STATUS_VAR = "#status"; private static final int SSH_CONNECT_ERROR_CODE = 255; private static int maxLen; private static boolean allowSshUserAtHost; private final XLog LOG = XLog.getLog(getClass()) ; protected SshActionExecutor() { super(ACTION_TYPE); } /** * Initialize Action. */ @Override public void initActionType() { super.initActionType(); maxLen = getOozieConf().getInt(CallbackServlet.CONF_MAX_DATA_LEN, 2 * 1024); allowSshUserAtHost = ConfigurationService.getBoolean(CONF_SSH_ALLOW_USER_AT_HOST); registerError(InterruptedException.class.getName(), ActionExecutorException.ErrorType.ERROR, "SH001"); registerError(JDOMException.class.getName(), ActionExecutorException.ErrorType.ERROR, "SH002"); initSshScripts(); } /** * Check ssh action status. * * @param context action execution context. * @param action action object. * @throws org.apache.oozie.action.ActionExecutorException in case if action cannot be executed */ @SuppressFBWarnings(value = {"COMMAND_INJECTION", "PATH_TRAVERSAL_OUT"}, justification = "Tracker URI is specified in the WF action, and action dir path is from context") @Override public void check(Context context, WorkflowAction action) throws ActionExecutorException { LOG.trace("check() start for action={0}", action.getId()); Status status = getActionStatus(context, action); boolean captureOutput; try { Element eConf = XmlUtils.parseXml(action.getConf()); Namespace ns = eConf.getNamespace(); captureOutput = eConf.getChild("capture-output", ns) != null; } catch (JDOMException ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_XML_PARSE_FAILED", "unknown error", ex); } LOG.debug("Capture Output: {0}", captureOutput); if (status == Status.OK) { if (captureOutput) { String outFile = getRemoteFileName(context, action, "stdout", false, true); String dataCommand = SSH_COMMAND_BASE + action.getTrackerUri() + " 
cat " + outFile; LOG.debug("Ssh command [{0}]", dataCommand); try { final StringBuffer outBuffer = getActionOutputMessage(dataCommand); context.setExecutionData(status.toString(), PropertiesUtils.stringToProperties(outBuffer.toString())); LOG.trace("Execution data set. status={0}, properties={1}", status, PropertiesUtils.stringToProperties(outBuffer.toString())); } catch (Exception ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_UNKNOWN_ERROR", "unknown error", ex); } } else { LOG.trace("Execution data set to null. status={0}", status); context.setExecutionData(status.toString(), null); } } else { if (status == Status.ERROR) { LOG.warn("Execution data set to null in ERROR"); context.setExecutionData(status.toString(), null); String actionErrorMsg = getActionErrorMessage(context, action); LOG.warn("{0}: Script failed on remote host with [{1}]", ErrorCode.E1111, actionErrorMsg); context.setErrorInfo(ErrorCode.E1111.toString(), actionErrorMsg); } else { LOG.warn("Execution data not set"); context.setExternalStatus(status.toString()); } } LOG.trace("check() end for action={0}", action); } private StringBuffer getActionOutputMessage(String dataCommand) throws IOException, ActionExecutorException { final Process process = Runtime.getRuntime().exec(dataCommand.split("\\s")); boolean overflow = false; final BufferDrainer bufferDrainer = new BufferDrainer(process, maxLen); bufferDrainer.drainBuffers(); final StringBuffer outBuffer = bufferDrainer.getInputBuffer(); final StringBuffer errBuffer = bufferDrainer.getErrorBuffer(); LOG.debug("outBuffer={0}", outBuffer); LOG.debug("errBuffer={0}", errBuffer); if (outBuffer.length() > maxLen) { overflow = true; } if (overflow) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_OUTPUT_EXCEED_MAX_LEN", "unknown error"); } return outBuffer; } private String getActionErrorMessage(Context context, WorkflowAction action) throws ActionExecutorException { String outFile = 
getRemoteFileName(context, action, "error", false, true); String errorMsgCmd = SSH_COMMAND_BASE + action.getTrackerUri() + " cat " + outFile; LOG.debug("Get error message command: [{0}]", errorMsgCmd); String errorMessage; try { final StringBuffer outBuffer = getActionOutputMessage(errorMsgCmd); errorMessage = outBuffer.toString().replaceAll("\n", ""); } catch (Exception ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_UNKNOWN_ERROR", "unknown error", ex); } return errorMessage; } /** * Kill ssh action. * * @param context action execution context. * @param action object. * @throws org.apache.oozie.action.ActionExecutorException in case if action cannot be executed */ @Override public void kill(Context context, WorkflowAction action) throws ActionExecutorException { LOG.info("Killing action"); String command = "ssh " + action.getTrackerUri() + " kill -KILL " + action.getExternalId(); int returnValue = getReturnValue(command); if (returnValue != 0) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FAILED_TO_KILL", XLog.format( "Unable to kill process {0} on {1}", action.getExternalId(), action.getTrackerUri())); } context.setEndData(WorkflowAction.Status.KILLED, "ERROR"); } /** * Start the ssh action execution. * * @param context action execution context. * @param action action object. 
* @throws org.apache.oozie.action.ActionExecutorException in case if action cannot be executed */ @SuppressWarnings("unchecked") @Override public void start(final Context context, final WorkflowAction action) throws ActionExecutorException { LOG.info("Starting action"); String confStr = action.getConf(); Element conf; try { conf = XmlUtils.parseXml(confStr); } catch (Exception ex) { throw convertException(ex); } Namespace nameSpace = conf.getNamespace(); Element hostElement = conf.getChild("host", nameSpace); String hostString = hostElement.getValue().trim(); hostString = prepareUserHost(hostString, context); final String host = hostString; final String dirLocation = execute(new Callable<String>() { public String call() throws Exception { return setupRemote(host, context, action); } }); String runningPid = execute(new Callable<String>() { public String call() throws Exception { return checkIfRunning(host, context, action); } }); String pid = ""; LOG.trace("runningPid={0}", runningPid); if (runningPid == null) { final Element commandElement = conf.getChild("command", nameSpace); final boolean ignoreOutput = conf.getChild("capture-output", nameSpace) == null; boolean preserve = false; if (commandElement != null) { String[] args = null; // Will either have <args>, <arg>, or neither (but not both) List<Element> argsList = conf.getChildren("args", nameSpace); // Arguments in an <args> are "flattened" (spaces are delimiters) if (argsList != null && argsList.size() > 0) { StringBuilder argsString = new StringBuilder(""); for (Element argsElement : argsList) { argsString = argsString.append(argsElement.getValue()).append(" "); } args = new String[]{argsString.toString()}; } else { // Arguments in an <arg> are preserved, even with spaces argsList = conf.getChildren("arg", nameSpace); if (argsList != null && argsList.size() > 0) { preserve = true; args = new String[argsList.size()]; for (int i = 0; i < argsList.size(); i++) { Element argsElement = argsList.get(i); args[i] = 
argsElement.getValue(); // Even though we're keeping the args as an array, if they contain a space we still have to either quote // them or escape their space (because the scripts will split them up otherwise) if (args[i].contains(" ") && !(args[i].startsWith("\"") && args[i].endsWith("\"") || args[i].startsWith("'") && args[i].endsWith("'"))) { args[i] = StringUtils.escapeString(args[i], '\\', ' '); } } } } final String[] argsF = args; final String recoveryId = context.getRecoveryId(); final boolean preserveF = preserve; pid = execute(new Callable<String>() { @Override public String call() throws Exception { return doExecute(host, dirLocation, commandElement.getValue(), argsF, ignoreOutput, action, recoveryId, preserveF); } }); } context.setStartData(pid, host, host); } else { pid = runningPid; context.setStartData(pid, host, host); check(context, action); } } private String checkIfRunning(String host, final Context context, final WorkflowAction action) { String outFile = getRemoteFileName(context, action, "pid", false, false); String getOutputCmd = SSH_COMMAND_BASE + host + " cat " + outFile; try { final Process process = Runtime.getRuntime().exec(getOutputCmd.split("\\s")); final BufferDrainer bufferDrainer = new BufferDrainer(process, maxLen); bufferDrainer.drainBuffers(); final StringBuffer buffer = bufferDrainer.getInputBuffer(); String pid = getFirstLine(buffer); if (Long.valueOf(pid) > 0) { return pid; } else { return null; } } catch (Exception e) { return null; } } /** * Get remote host working location. * * @param context action execution context * @param action Action * @param fileExtension Extension to be added to file name * @param dirOnly Get the Directory only * @param useExtId Flag to use external ID in the path * @return remote host file name/Directory. 
*/ public String getRemoteFileName(Context context, WorkflowAction action, String fileExtension, boolean dirOnly, boolean useExtId) { String path = getActionDirPath(context.getWorkflow().getId(), action, ACTION_TYPE, false) + "/"; if (dirOnly) { return path; } if (useExtId) { path = path + action.getExternalId() + "."; } path = path + context.getRecoveryId() + "." + fileExtension; return path; } /** * Utility method to execute command. * * @param command Command to execute as String. * @return exit status of the execution. * @throws IOException if processSettings exits with status nonzero. * @throws InterruptedException if processSettings does not run properly. */ public int executeCommand(String command) throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); Process p = runtime.exec(command.split("\\s")); final BufferDrainer bufferDrainer = new BufferDrainer(p, maxLen); final int exitValue = bufferDrainer.drainBuffers(); final StringBuffer errorBuffer = bufferDrainer.getErrorBuffer(); if (exitValue != 0) { String error = getTruncatedString(errorBuffer); throw new IOException(XLog.format("Not able to perform operation [{0}]", command) + " | " + "ErrorStream: " + error); } return exitValue; } /** * Do ssh action execution setup on remote host. * * @param host host name. * @param context action execution context. * @param action action object. * @return remote host working directory. * @throws IOException thrown if failed to setup. * @throws InterruptedException thrown if any interruption happens. 
*/ protected String setupRemote(String host, Context context, WorkflowAction action) throws IOException, InterruptedException { LOG.info("Attempting to copy ssh base scripts to remote host [{0}]", host); String localDirLocation = Services.get().getRuntimeDir() + "/ssh"; if (localDirLocation.endsWith("/")) { localDirLocation = localDirLocation.substring(0, localDirLocation.length() - 1); } File file = new File(localDirLocation + "/ssh-base.sh"); if (!file.exists()) { throw new IOException("Required Local file " + file.getAbsolutePath() + " not present."); } file = new File(localDirLocation + "/ssh-wrapper.sh"); if (!file.exists()) { throw new IOException("Required Local file " + file.getAbsolutePath() + " not present."); } String remoteDirLocation = getRemoteFileName(context, action, null, true, true); String command = XLog.format("{0}{1} mkdir -p {2} ", SSH_COMMAND_BASE, host, remoteDirLocation).toString(); executeCommand(command); command = XLog.format("{0}{1}/ssh-base.sh {2}/ssh-wrapper.sh {3}:{4}", SCP_COMMAND_BASE, localDirLocation, localDirLocation, host, remoteDirLocation); executeCommand(command); command = XLog.format("{0}{1} chmod +x {2}ssh-base.sh {3}ssh-wrapper.sh ", SSH_COMMAND_BASE, host, remoteDirLocation, remoteDirLocation); executeCommand(command); return remoteDirLocation; } /** * Execute the ssh command. * * @param host hostname. * @param dirLocation location of the base and wrapper scripts. * @param cmnd command to be executed. * @param args command arguments. * @param ignoreOutput ignore output option. * @param action action object. * @param recoveryId action id + run number to enable recovery in rerun * @param preserveArgs tell the ssh scripts to preserve or flatten the arguments * @return processSettings id of the running command. * @throws IOException thrown if failed to run the command. * @throws InterruptedException thrown if any interruption happens. 
*/ protected String doExecute(String host, String dirLocation, String cmnd, String[] args, boolean ignoreOutput, WorkflowAction action, String recoveryId, boolean preserveArgs) throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); String callbackPost = ignoreOutput ? "_" : ConfigurationService.get(HTTP_COMMAND_OPTIONS).replace(" ", "%%%"); String preserveArgsS = preserveArgs ? "PRESERVE_ARGS" : "FLATTEN_ARGS"; // TODO check String callBackUrl = Services.get().get(CallbackService.class) .createCallBackUrl(action.getId(), EXT_STATUS_VAR); String command = XLog.format("{0}{1} {2}ssh-base.sh {3} {4} \"{5}\" \"{6}\" {7} {8} ", SSH_COMMAND_BASE, host, dirLocation, preserveArgsS, ConfigurationService.get(HTTP_COMMAND), callBackUrl, callbackPost, recoveryId, cmnd); String[] commandArray = command.split("\\s"); String[] finalCommand; if (args == null) { finalCommand = commandArray; } else { finalCommand = new String[commandArray.length + args.length]; System.arraycopy(commandArray, 0, finalCommand, 0, commandArray.length); System.arraycopy(args, 0, finalCommand, commandArray.length, args.length); } LOG.trace("Executing SSH command [finalCommand={0}]", Arrays.toString(finalCommand)); final Process p = runtime.exec(finalCommand); BufferDrainer bufferDrainer = new BufferDrainer(p, maxLen); final int exitValue = bufferDrainer.drainBuffers(); final StringBuffer inputBuffer = bufferDrainer.getInputBuffer(); final StringBuffer errorBuffer = bufferDrainer.getErrorBuffer(); final String pid = getFirstLine(inputBuffer); if (exitValue != 0) { String error = getTruncatedString(errorBuffer); throw new IOException(XLog.format("Not able to execute ssh-base.sh on {0}", host) + " | " + "ErrorStream: " + error); } LOG.trace("After execution pid={0}", pid); return pid; } /** * End action execution. * * @param context action execution context. * @param action action object. * @throws ActionExecutorException thrown if action end execution fails. 
*/ public void end(final Context context, final WorkflowAction action) throws ActionExecutorException { if (action.getExternalStatus().equals("OK")) { context.setEndData(WorkflowAction.Status.OK, WorkflowAction.Status.OK.toString()); } else { context.setEndData(WorkflowAction.Status.ERROR, WorkflowAction.Status.ERROR.toString()); } boolean deleteTmpDir = ConfigurationService.getBoolean(DELETE_TMP_DIR); if (deleteTmpDir) { String tmpDir = getRemoteFileName(context, action, null, true, false); String removeTmpDirCmd = SSH_COMMAND_BASE + action.getTrackerUri() + " rm -rf " + tmpDir; int retVal = getReturnValue(removeTmpDirCmd); if (retVal != 0) { XLog.getLog(getClass()).warn("Cannot delete temp dir {0}", tmpDir); } } LOG.info("Action ended with external status [{0}]", action.getExternalStatus()); } /** * Get the return value of a processSettings. * * @param command command to be executed. * @return zero if execution is successful and any non zero value for failure. * @throws ActionExecutorException */ private int getReturnValue(String command) throws ActionExecutorException { LOG.trace("Getting return value for command={0}", command); int returnValue; Process ps = null; try { ps = Runtime.getRuntime().exec(command.split("\\s")); final BufferDrainer bufferDrainer = new BufferDrainer(ps, 0); returnValue = bufferDrainer.drainBuffers(); } catch (IOException e) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FAILED_OPERATION", XLog.format( "Not able to perform operation {0}", command), e); } finally { ps.destroy(); } LOG.trace("returnValue={0}", returnValue); return returnValue; } /** * Copy the ssh base and wrapper scripts to the local directory. 
*/ @SuppressFBWarnings(value ="PATH_TRAVERSAL_OUT", justification = "Path is created runtime") private void initSshScripts() { String dirLocation = Services.get().getRuntimeDir() + "/ssh"; File path = new File(dirLocation); path.mkdirs(); if (!path.exists()) { throw new RuntimeException(XLog.format("Not able to create required directory {0}", dirLocation)); } try { IOUtils.copyCharStream(IOUtils.getResourceAsReader("ssh-base.sh", -1), new OutputStreamWriter( new FileOutputStream(dirLocation + "/ssh-base.sh"), StandardCharsets.UTF_8)); IOUtils.copyCharStream(IOUtils.getResourceAsReader("ssh-wrapper.sh", -1), new OutputStreamWriter( new FileOutputStream(dirLocation + "/ssh-wrapper.sh"), StandardCharsets.UTF_8)); } catch (IOException ie) { throw new RuntimeException(XLog.format("Not able to copy required scripts file to {0} " + "for SshActionHandler", dirLocation)); } } /** * Get action status. * * @param context executor context * @param action action object. * @return status of the action(RUNNING/OK/ERROR). * @throws ActionExecutorException thrown if there is any error in getting status. 
*/ protected Status getActionStatus(Context context, WorkflowAction action) throws ActionExecutorException { String command = SSH_COMMAND_BASE + action.getTrackerUri() + " ps -p " + action.getExternalId(); Status aStatus; int returnValue = getReturnValue(command); if (returnValue == SSH_CONNECT_ERROR_CODE) { int maxRetryCount = ConfigurationService.getInt(CHECK_MAX_RETRIES, 3); long waitTime = ConfigurationService.getLong(CHECK_INITIAL_RETRY_WAIT_TIME, 3000); for (int retries = 1; retries <= maxRetryCount; retries++) { waitTime = handleRetry(waitTime, retries); returnValue = getReturnValue(command); if (returnValue != SSH_CONNECT_ERROR_CODE) { break; } } if (returnValue == SSH_CONNECT_ERROR_CODE) { throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, ERR_COULD_NOT_CONNECT, "Failed to connect to host [" + action.getTrackerUri() + "] for ssh action status check."); } } if (returnValue == 0) { aStatus = Status.RUNNING; } else { if (checkSSHActionFileExistence(context, action, "error")) { aStatus = Status.ERROR; } else { if (checkSSHActionFileExistence(context, action, "success")) { aStatus = Status.OK; } else { aStatus = Status.ERROR; } } } return aStatus; } private boolean checkSSHActionFileExistence(final Context context, final WorkflowAction action, String fileExtension) throws ActionExecutorException { String outFile = getRemoteFileName(context, action, fileExtension, false, true); String checkCmd = SSH_COMMAND_BASE + action.getTrackerUri() + " ls " + outFile; int retVal = getReturnValue(checkCmd); return retVal == 0 ? 
true : false; } private long handleRetry(long sleepBeforeRetryMs, final int retries) { LOG.warn("failed to check ssh action status, sleeping {0} milliseconds before retry #{1}", sleepBeforeRetryMs, retries); try { Thread.sleep(sleepBeforeRetryMs); } catch (InterruptedException e) { LOG.error("ssh action status check retry get interrupted during wait, caused by {0}", e.getMessage()); } sleepBeforeRetryMs *= 2; return sleepBeforeRetryMs; } /** * Execute the callable. * * @param callable required callable. * @throws ActionExecutorException thrown if there is any error in command execution. */ private <T> T execute(Callable<T> callable) throws ActionExecutorException { XLog log = XLog.getLog(getClass()); try { return callable.call(); } catch (IOException ex) { log.warn("Error while executing ssh EXECUTION"); String errorMessage = ex.getMessage(); if (null == errorMessage) { // Unknown IOException throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_UNKNOWN_ERROR, ex .getMessage(), ex); } // Host Resolution Issues else { if (errorMessage.contains("Could not resolve hostname") || errorMessage.contains("service not known")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_HOST_RESOLUTION, ex .getMessage(), ex); } // Connection Timeout. Host temporarily down. 
else { if (errorMessage.contains("timed out")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_COULD_NOT_CONNECT, ex.getMessage(), ex); }// Local ssh-base or ssh-wrapper missing else { if (errorMessage.contains("Required Local file")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF, ex.getMessage(), ex); // local_FNF }// Required oozie bash scripts missing, after the copy was // successful else { if (errorMessage.contains("No such file or directory") && (errorMessage.contains("ssh-base") || errorMessage.contains("ssh-wrapper"))) { throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF, ex.getMessage(), ex); // remote // FNF } // Required application execution binary missing (either // caught by ssh-wrapper else { if (errorMessage.contains("command not found")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_FNF, ex .getMessage(), ex); // remote // FNF } // Permission denied while connecting else { if (errorMessage.contains("Permission denied")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_AUTH_FAILED, ex.getMessage(), ex); } // Permission denied while executing else { if (errorMessage.contains(": Permission denied")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_NO_EXEC_PERM, ex.getMessage(), ex); } else { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_UNKNOWN_ERROR, ex.getMessage(), ex); } } } } } } } } } // Any other type of exception catch (Exception ex) { throw convertException(ex); } } /** * Checks whether the system is configured to always use the oozie user for ssh, and injects the user if required. * * @param host the host string. * @param context the execution context. * @return the modified host string with a user parameter added on if required. 
* @throws ActionExecutorException in case the flag to use the oozie user is turned on and there is a mismatch * between the user specified in the host and the oozie user. */ private String prepareUserHost(String host, Context context) throws ActionExecutorException { String oozieUser = context.getProtoActionConf().get(OozieClient.USER_NAME); if (allowSshUserAtHost) { if (!host.contains("@")) { host = oozieUser + "@" + host; } } else { if (host.contains("@")) { if (!host.toLowerCase().startsWith(oozieUser + "@")) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_USER_MISMATCH, XLog.format("user mismatch between oozie user [{0}] and ssh host [{1}]", oozieUser, host)); } } else { host = oozieUser + "@" + host; } } LOG.trace("User host is {0}", host); return host; } @Override public boolean isCompleted(String externalStatus) { return true; } /** * Truncate the string to max length. * * @param strBuffer * @return truncated string string */ private String getTruncatedString(StringBuffer strBuffer) { if (strBuffer.length() <= maxLen) { return strBuffer.toString(); } else { return strBuffer.substring(0, maxLen); } } /** * Returns the first line from a StringBuffer, recognized by the new line character \n. * * @param buffer The StringBuffer from which the first line is required. * @return The first line of the buffer. */ private String getFirstLine(StringBuffer buffer) { int newLineIndex = buffer.indexOf("\n"); if (newLineIndex == -1) { return buffer.toString(); } else { return buffer.substring(0, newLineIndex); } } }
googleapis/google-cloud-java
35,542
java-vision/proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/UpdateProductRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p3beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p3beta1; /** * * * <pre> * Request message for the `UpdateProduct` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.UpdateProductRequest} */ public final class UpdateProductRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.UpdateProductRequest) UpdateProductRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateProductRequest.newBuilder() to construct. 
private UpdateProductRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateProductRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateProductRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.UpdateProductRequest.class, com.google.cloud.vision.v1p3beta1.UpdateProductRequest.Builder.class); } private int bitField0_; public static final int PRODUCT_FIELD_NUMBER = 1; private com.google.cloud.vision.v1p3beta1.Product product_; /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the product field is set. */ @java.lang.Override public boolean hasProduct() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The product. */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.Product getProduct() { return product_ == null ? com.google.cloud.vision.v1p3beta1.Product.getDefaultInstance() : product_; } /** * * * <pre> * Required. 
The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.ProductOrBuilder getProductOrBuilder() { return product_ == null ? com.google.cloud.vision.v1p3beta1.Product.getDefaultInstance() : product_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getProduct()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getProduct()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p3beta1.UpdateProductRequest)) { return super.equals(obj); } com.google.cloud.vision.v1p3beta1.UpdateProductRequest other = (com.google.cloud.vision.v1p3beta1.UpdateProductRequest) obj; if (hasProduct() != other.hasProduct()) return false; if (hasProduct()) { if (!getProduct().equals(other.getProduct())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasProduct()) { 
hash = (37 * hash) + PRODUCT_FIELD_NUMBER; hash = (53 * hash) + getProduct().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1p3beta1.UpdateProductRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateProduct` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.UpdateProductRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.UpdateProductRequest) com.google.cloud.vision.v1p3beta1.UpdateProductRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.UpdateProductRequest.class, com.google.cloud.vision.v1p3beta1.UpdateProductRequest.Builder.class); } // Construct using com.google.cloud.vision.v1p3beta1.UpdateProductRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getProductFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; product_ = null; if (productBuilder_ != null) { productBuilder_.dispose(); productBuilder_ = null; } updateMask_ = null; if 
(updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_UpdateProductRequest_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductRequest getDefaultInstanceForType() { return com.google.cloud.vision.v1p3beta1.UpdateProductRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductRequest build() { com.google.cloud.vision.v1p3beta1.UpdateProductRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductRequest buildPartial() { com.google.cloud.vision.v1p3beta1.UpdateProductRequest result = new com.google.cloud.vision.v1p3beta1.UpdateProductRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.vision.v1p3beta1.UpdateProductRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.product_ = productBuilder_ == null ? product_ : productBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p3beta1.UpdateProductRequest) { return mergeFrom((com.google.cloud.vision.v1p3beta1.UpdateProductRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.UpdateProductRequest other) { if (other == com.google.cloud.vision.v1p3beta1.UpdateProductRequest.getDefaultInstance()) return this; if (other.hasProduct()) { mergeProduct(other.getProduct()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getProductFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.vision.v1p3beta1.Product product_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.Product, com.google.cloud.vision.v1p3beta1.Product.Builder, com.google.cloud.vision.v1p3beta1.ProductOrBuilder> productBuilder_; /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the product field is set. */ public boolean hasProduct() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The product. */ public com.google.cloud.vision.v1p3beta1.Product getProduct() { if (productBuilder_ == null) { return product_ == null ? com.google.cloud.vision.v1p3beta1.Product.getDefaultInstance() : product_; } else { return productBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProduct(com.google.cloud.vision.v1p3beta1.Product value) { if (productBuilder_ == null) { if (value == null) { throw new NullPointerException(); } product_ = value; } else { productBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProduct(com.google.cloud.vision.v1p3beta1.Product.Builder builderForValue) { if (productBuilder_ == null) { product_ = builderForValue.build(); } else { productBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeProduct(com.google.cloud.vision.v1p3beta1.Product value) { if (productBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && product_ != null && product_ != com.google.cloud.vision.v1p3beta1.Product.getDefaultInstance()) { getProductBuilder().mergeFrom(value); } else { product_ = value; } } else { productBuilder_.mergeFrom(value); } if (product_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. 
* </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearProduct() { bitField0_ = (bitField0_ & ~0x00000001); product_ = null; if (productBuilder_ != null) { productBuilder_.dispose(); productBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p3beta1.Product.Builder getProductBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProductFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p3beta1.ProductOrBuilder getProductOrBuilder() { if (productBuilder_ != null) { return productBuilder_.getMessageOrBuilder(); } else { return product_ == null ? com.google.cloud.vision.v1p3beta1.Product.getDefaultInstance() : product_; } } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. 
* </pre> * * <code> * .google.cloud.vision.v1p3beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.Product, com.google.cloud.vision.v1p3beta1.Product.Builder, com.google.cloud.vision.v1p3beta1.ProductOrBuilder> getProductFieldBuilder() { if (productBuilder_ == null) { productBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p3beta1.Product, com.google.cloud.vision.v1p3beta1.Product.Builder, com.google.cloud.vision.v1p3beta1.ProductOrBuilder>( getProduct(), getParentForChildren(), isClean()); product_ = null; } return productBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.UpdateProductRequest) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.UpdateProductRequest) private static final com.google.cloud.vision.v1p3beta1.UpdateProductRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.UpdateProductRequest(); } public static com.google.cloud.vision.v1p3beta1.UpdateProductRequest getDefaultInstance() { return DEFAULT_INSTANCE; } 
private static final com.google.protobuf.Parser<UpdateProductRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateProductRequest>() { @java.lang.Override public UpdateProductRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateProductRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateProductRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.UpdateProductRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,542
java-vision/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/UpdateProductRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p4beta1; /** * * * <pre> * Request message for the `UpdateProduct` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.UpdateProductRequest} */ public final class UpdateProductRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.UpdateProductRequest) UpdateProductRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateProductRequest.newBuilder() to construct. 
private UpdateProductRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateProductRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateProductRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.UpdateProductRequest.class, com.google.cloud.vision.v1p4beta1.UpdateProductRequest.Builder.class); } private int bitField0_; public static final int PRODUCT_FIELD_NUMBER = 1; private com.google.cloud.vision.v1p4beta1.Product product_; /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the product field is set. */ @java.lang.Override public boolean hasProduct() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The product. */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.Product getProduct() { return product_ == null ? com.google.cloud.vision.v1p4beta1.Product.getDefaultInstance() : product_; } /** * * * <pre> * Required. 
The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ProductOrBuilder getProductOrBuilder() { return product_ == null ? com.google.cloud.vision.v1p4beta1.Product.getDefaultInstance() : product_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getProduct()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getProduct()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p4beta1.UpdateProductRequest)) { return super.equals(obj); } com.google.cloud.vision.v1p4beta1.UpdateProductRequest other = (com.google.cloud.vision.v1p4beta1.UpdateProductRequest) obj; if (hasProduct() != other.hasProduct()) return false; if (hasProduct()) { if (!getProduct().equals(other.getProduct())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasProduct()) { 
hash = (37 * hash) + PRODUCT_FIELD_NUMBER; hash = (53 * hash) + getProduct().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1p4beta1.UpdateProductRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateProduct` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.UpdateProductRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.UpdateProductRequest) com.google.cloud.vision.v1p4beta1.UpdateProductRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.UpdateProductRequest.class, com.google.cloud.vision.v1p4beta1.UpdateProductRequest.Builder.class); } // Construct using com.google.cloud.vision.v1p4beta1.UpdateProductRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getProductFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; product_ = null; if (productBuilder_ != null) { productBuilder_.dispose(); productBuilder_ = null; } updateMask_ = null; if 
(updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_UpdateProductRequest_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductRequest getDefaultInstanceForType() { return com.google.cloud.vision.v1p4beta1.UpdateProductRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductRequest build() { com.google.cloud.vision.v1p4beta1.UpdateProductRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductRequest buildPartial() { com.google.cloud.vision.v1p4beta1.UpdateProductRequest result = new com.google.cloud.vision.v1p4beta1.UpdateProductRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.vision.v1p4beta1.UpdateProductRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.product_ = productBuilder_ == null ? product_ : productBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p4beta1.UpdateProductRequest) { return mergeFrom((com.google.cloud.vision.v1p4beta1.UpdateProductRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.UpdateProductRequest other) { if (other == com.google.cloud.vision.v1p4beta1.UpdateProductRequest.getDefaultInstance()) return this; if (other.hasProduct()) { mergeProduct(other.getProduct()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getProductFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.vision.v1p4beta1.Product product_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Product, com.google.cloud.vision.v1p4beta1.Product.Builder, com.google.cloud.vision.v1p4beta1.ProductOrBuilder> productBuilder_; /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the product field is set. */ public boolean hasProduct() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The product. */ public com.google.cloud.vision.v1p4beta1.Product getProduct() { if (productBuilder_ == null) { return product_ == null ? com.google.cloud.vision.v1p4beta1.Product.getDefaultInstance() : product_; } else { return productBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProduct(com.google.cloud.vision.v1p4beta1.Product value) { if (productBuilder_ == null) { if (value == null) { throw new NullPointerException(); } product_ = value; } else { productBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProduct(com.google.cloud.vision.v1p4beta1.Product.Builder builderForValue) { if (productBuilder_ == null) { product_ = builderForValue.build(); } else { productBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeProduct(com.google.cloud.vision.v1p4beta1.Product value) { if (productBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && product_ != null && product_ != com.google.cloud.vision.v1p4beta1.Product.getDefaultInstance()) { getProductBuilder().mergeFrom(value); } else { product_ = value; } } else { productBuilder_.mergeFrom(value); } if (product_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. 
* </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearProduct() { bitField0_ = (bitField0_ & ~0x00000001); product_ = null; if (productBuilder_ != null) { productBuilder_.dispose(); productBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.Product.Builder getProductBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProductFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. * </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.vision.v1p4beta1.ProductOrBuilder getProductOrBuilder() { if (productBuilder_ != null) { return productBuilder_.getMessageOrBuilder(); } else { return product_ == null ? com.google.cloud.vision.v1p4beta1.Product.getDefaultInstance() : product_; } } /** * * * <pre> * Required. The Product resource which replaces the one on the server. * product.name is immutable. 
* </pre> * * <code> * .google.cloud.vision.v1p4beta1.Product product = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Product, com.google.cloud.vision.v1p4beta1.Product.Builder, com.google.cloud.vision.v1p4beta1.ProductOrBuilder> getProductFieldBuilder() { if (productBuilder_ == null) { productBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Product, com.google.cloud.vision.v1p4beta1.Product.Builder, com.google.cloud.vision.v1p4beta1.ProductOrBuilder>( getProduct(), getParentForChildren(), isClean()); product_ = null; } return productBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The [FieldMask][google.protobuf.FieldMask] that specifies which fields * to update. * If update_mask isn't specified, all mutable fields are to be updated. * Valid mask paths include `product_labels`, `display_name`, and * `description`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.UpdateProductRequest) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.UpdateProductRequest) private static final com.google.cloud.vision.v1p4beta1.UpdateProductRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.UpdateProductRequest(); } public static com.google.cloud.vision.v1p4beta1.UpdateProductRequest getDefaultInstance() { return DEFAULT_INSTANCE; } 
private static final com.google.protobuf.Parser<UpdateProductRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateProductRequest>() { @java.lang.Override public UpdateProductRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateProductRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateProductRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.UpdateProductRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,656
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/AdGroupCriterionLabel.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/ad_group_criterion_label.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; /** * <pre> * A relationship between an ad group criterion and a label. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.AdGroupCriterionLabel} */ public final class AdGroupCriterionLabel extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.AdGroupCriterionLabel) AdGroupCriterionLabelOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionLabel.newBuilder() to construct. private AdGroupCriterionLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionLabel() { resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionLabel(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v19_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v19_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. 
The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AD_GROUP_CRITERION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. 
*/ @java.lang.Override public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ @java.lang.Override public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ @java.lang.Override public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LABEL_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ @java.lang.Override public boolean hasLabel() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. 
* </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ @java.lang.Override public java.lang.String getLabel() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. */ @java.lang.Override public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.resources.AdGroupCriterionLabel)) { return super.equals(obj); } com.google.ads.googleads.v19.resources.AdGroupCriterionLabel other = (com.google.ads.googleads.v19.resources.AdGroupCriterionLabel) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasAdGroupCriterion() != other.hasAdGroupCriterion()) return false; if (hasAdGroupCriterion()) { if (!getAdGroupCriterion() .equals(other.getAdGroupCriterion())) return false; } if (hasLabel() != other.hasLabel()) return false; if (hasLabel()) { if (!getLabel() .equals(other.getLabel())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasAdGroupCriterion()) { hash = (37 * hash) + AD_GROUP_CRITERION_FIELD_NUMBER; hash = (53 * hash) + getAdGroupCriterion().hashCode(); } if (hasLabel()) { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabel().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel 
parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.resources.AdGroupCriterionLabel prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A relationship between an ad group criterion and a label. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.AdGroupCriterionLabel} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.AdGroupCriterionLabel) com.google.ads.googleads.v19.resources.AdGroupCriterionLabelOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v19_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v19_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.Builder.class); } // Construct using com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v19_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.getDefaultInstance(); } @java.lang.Override public 
com.google.ads.googleads.v19.resources.AdGroupCriterionLabel build() { com.google.ads.googleads.v19.resources.AdGroupCriterionLabel result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionLabel buildPartial() { com.google.ads.googleads.v19.resources.AdGroupCriterionLabel result = new com.google.ads.googleads.v19.resources.AdGroupCriterionLabel(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.resources.AdGroupCriterionLabel result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.adGroupCriterion_ = adGroupCriterion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.label_ = label_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.resources.AdGroupCriterionLabel) { return mergeFrom((com.google.ads.googleads.v19.resources.AdGroupCriterionLabel)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.resources.AdGroupCriterionLabel other) { if (other == com.google.ads.googleads.v19.resources.AdGroupCriterionLabel.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasAdGroupCriterion()) { adGroupCriterion_ = other.adGroupCriterion_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasLabel()) { label_ = other.label_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 34: { adGroupCriterion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 34 case 42: { label_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. */ public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. 
* </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAdGroupCriterion() { adGroupCriterion_ = getDefaultInstance().getAdGroupCriterion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ public boolean hasLabel() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ public java.lang.String getLabel() { java.lang.Object ref = label_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. 
*/ public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The label to set. * @return This builder for chaining. */ public Builder setLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearLabel() { label_ = getDefaultInstance().getLabel(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for label to set. * @return This builder for chaining. 
*/ public Builder setLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.AdGroupCriterionLabel) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.AdGroupCriterionLabel) private static final com.google.ads.googleads.v19.resources.AdGroupCriterionLabel DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.AdGroupCriterionLabel(); } public static com.google.ads.googleads.v19.resources.AdGroupCriterionLabel getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionLabel> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionLabel>() { @java.lang.Override public AdGroupCriterionLabel parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<AdGroupCriterionLabel> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionLabel> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,656
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/AdGroupCriterionLabel.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/ad_group_criterion_label.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; /** * <pre> * A relationship between an ad group criterion and a label. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.AdGroupCriterionLabel} */ public final class AdGroupCriterionLabel extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.AdGroupCriterionLabel) AdGroupCriterionLabelOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionLabel.newBuilder() to construct. private AdGroupCriterionLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionLabel() { resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionLabel(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v20_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v20_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. 
The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AD_GROUP_CRITERION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. 
*/ @java.lang.Override public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ @java.lang.Override public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ @java.lang.Override public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LABEL_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ @java.lang.Override public boolean hasLabel() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. 
* </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ @java.lang.Override public java.lang.String getLabel() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. */ @java.lang.Override public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.resources.AdGroupCriterionLabel)) { return super.equals(obj); } com.google.ads.googleads.v20.resources.AdGroupCriterionLabel other = (com.google.ads.googleads.v20.resources.AdGroupCriterionLabel) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasAdGroupCriterion() != other.hasAdGroupCriterion()) return false; if (hasAdGroupCriterion()) { if (!getAdGroupCriterion() .equals(other.getAdGroupCriterion())) return false; } if (hasLabel() != other.hasLabel()) return false; if (hasLabel()) { if (!getLabel() .equals(other.getLabel())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasAdGroupCriterion()) { hash = (37 * hash) + AD_GROUP_CRITERION_FIELD_NUMBER; hash = (53 * hash) + getAdGroupCriterion().hashCode(); } if (hasLabel()) { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabel().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel 
parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.resources.AdGroupCriterionLabel prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A relationship between an ad group criterion and a label. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.AdGroupCriterionLabel} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.AdGroupCriterionLabel) com.google.ads.googleads.v20.resources.AdGroupCriterionLabelOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v20_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v20_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.Builder.class); } // Construct using com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v20_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.getDefaultInstance(); } @java.lang.Override public 
com.google.ads.googleads.v20.resources.AdGroupCriterionLabel build() { com.google.ads.googleads.v20.resources.AdGroupCriterionLabel result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionLabel buildPartial() { com.google.ads.googleads.v20.resources.AdGroupCriterionLabel result = new com.google.ads.googleads.v20.resources.AdGroupCriterionLabel(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.resources.AdGroupCriterionLabel result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.adGroupCriterion_ = adGroupCriterion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.label_ = label_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.resources.AdGroupCriterionLabel) { return mergeFrom((com.google.ads.googleads.v20.resources.AdGroupCriterionLabel)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.resources.AdGroupCriterionLabel other) { if (other == com.google.ads.googleads.v20.resources.AdGroupCriterionLabel.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasAdGroupCriterion()) { adGroupCriterion_ = other.adGroupCriterion_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasLabel()) { label_ = other.label_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 34: { adGroupCriterion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 34 case 42: { label_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. */ public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. 
* </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAdGroupCriterion() { adGroupCriterion_ = getDefaultInstance().getAdGroupCriterion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ public boolean hasLabel() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ public java.lang.String getLabel() { java.lang.Object ref = label_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. 
*/ public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The label to set. * @return This builder for chaining. */ public Builder setLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearLabel() { label_ = getDefaultInstance().getLabel(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for label to set. * @return This builder for chaining. 
*/ public Builder setLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.AdGroupCriterionLabel) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.AdGroupCriterionLabel) private static final com.google.ads.googleads.v20.resources.AdGroupCriterionLabel DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.AdGroupCriterionLabel(); } public static com.google.ads.googleads.v20.resources.AdGroupCriterionLabel getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionLabel> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionLabel>() { @java.lang.Override public AdGroupCriterionLabel parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<AdGroupCriterionLabel> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionLabel> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,656
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/AdGroupCriterionLabel.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/resources/ad_group_criterion_label.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.resources; /** * <pre> * A relationship between an ad group criterion and a label. * </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.AdGroupCriterionLabel} */ public final class AdGroupCriterionLabel extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.AdGroupCriterionLabel) AdGroupCriterionLabelOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionLabel.newBuilder() to construct. private AdGroupCriterionLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionLabel() { resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionLabel(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v21_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v21_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. 
The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AD_GROUP_CRITERION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. 
*/ @java.lang.Override public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ @java.lang.Override public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ @java.lang.Override public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LABEL_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ @java.lang.Override public boolean hasLabel() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. 
* </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ @java.lang.Override public java.lang.String getLabel() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. */ @java.lang.Override public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, adGroupCriterion_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.resources.AdGroupCriterionLabel)) { return super.equals(obj); } com.google.ads.googleads.v21.resources.AdGroupCriterionLabel other = (com.google.ads.googleads.v21.resources.AdGroupCriterionLabel) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasAdGroupCriterion() != other.hasAdGroupCriterion()) return false; if (hasAdGroupCriterion()) { if (!getAdGroupCriterion() .equals(other.getAdGroupCriterion())) return false; } if (hasLabel() != other.hasLabel()) return false; if (hasLabel()) { if (!getLabel() .equals(other.getLabel())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasAdGroupCriterion()) { hash = (37 * hash) + AD_GROUP_CRITERION_FIELD_NUMBER; hash = (53 * hash) + getAdGroupCriterion().hashCode(); } if (hasLabel()) { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabel().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel 
parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.resources.AdGroupCriterionLabel prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A relationship between an ad group criterion and a label. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.AdGroupCriterionLabel} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.AdGroupCriterionLabel) com.google.ads.googleads.v21.resources.AdGroupCriterionLabelOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v21_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v21_resources_AdGroupCriterionLabel_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.class, com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.Builder.class); } // Construct using com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; adGroupCriterion_ = ""; label_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabelProto.internal_static_google_ads_googleads_v21_resources_AdGroupCriterionLabel_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.getDefaultInstance(); } @java.lang.Override public 
com.google.ads.googleads.v21.resources.AdGroupCriterionLabel build() { com.google.ads.googleads.v21.resources.AdGroupCriterionLabel result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionLabel buildPartial() { com.google.ads.googleads.v21.resources.AdGroupCriterionLabel result = new com.google.ads.googleads.v21.resources.AdGroupCriterionLabel(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.resources.AdGroupCriterionLabel result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.adGroupCriterion_ = adGroupCriterion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.label_ = label_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.resources.AdGroupCriterionLabel) { return mergeFrom((com.google.ads.googleads.v21.resources.AdGroupCriterionLabel)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.resources.AdGroupCriterionLabel other) { if (other == com.google.ads.googleads.v21.resources.AdGroupCriterionLabel.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasAdGroupCriterion()) { adGroupCriterion_ = other.adGroupCriterion_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasLabel()) { label_ = other.label_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 34: { adGroupCriterion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 34 case 42: { label_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the ad group criterion label. * Ad group criterion label resource names have the form: * `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object adGroupCriterion_ = ""; /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the adGroupCriterion field is set. */ public boolean hasAdGroupCriterion() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Immutable. The ad group criterion to which the label is attached. 
* </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The adGroupCriterion. */ public java.lang.String getAdGroupCriterion() { java.lang.Object ref = adGroupCriterion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); adGroupCriterion_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for adGroupCriterion. */ public com.google.protobuf.ByteString getAdGroupCriterionBytes() { java.lang.Object ref = adGroupCriterion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); adGroupCriterion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAdGroupCriterion() { adGroupCriterion_ = getDefaultInstance().getAdGroupCriterion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Immutable. The ad group criterion to which the label is attached. * </pre> * * <code>optional string ad_group_criterion = 4 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for adGroupCriterion to set. * @return This builder for chaining. */ public Builder setAdGroupCriterionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); adGroupCriterion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object label_ = ""; /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the label field is set. */ public boolean hasLabel() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The label. */ public java.lang.String getLabel() { java.lang.Object ref = label_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); label_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for label. 
*/ public com.google.protobuf.ByteString getLabelBytes() { java.lang.Object ref = label_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); label_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The label to set. * @return This builder for chaining. */ public Builder setLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearLabel() { label_ = getDefaultInstance().getLabel(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Immutable. The label assigned to the ad group criterion. * </pre> * * <code>optional string label = 5 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for label to set. * @return This builder for chaining. 
*/ public Builder setLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); label_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.AdGroupCriterionLabel) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.AdGroupCriterionLabel) private static final com.google.ads.googleads.v21.resources.AdGroupCriterionLabel DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.AdGroupCriterionLabel(); } public static com.google.ads.googleads.v21.resources.AdGroupCriterionLabel getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionLabel> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionLabel>() { @java.lang.Override public AdGroupCriterionLabel parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<AdGroupCriterionLabel> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionLabel> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionLabel getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,484
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/ListVersionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apihub/v1/apihub_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apihub.v1; /** * * * <pre> * The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] method's * response. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.ListVersionsResponse} */ public final class ListVersionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.ListVersionsResponse) ListVersionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListVersionsResponse.newBuilder() to construct. 
private ListVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListVersionsResponse() { versions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListVersionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_ListVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_ListVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.ListVersionsResponse.class, com.google.cloud.apihub.v1.ListVersionsResponse.Builder.class); } public static final int VERSIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.apihub.v1.Version> versions_; /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.apihub.v1.Version> getVersionsList() { return versions_; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.apihub.v1.VersionOrBuilder> getVersionsOrBuilderList() { return versions_; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ @java.lang.Override public int getVersionsCount() { return versions_.size(); } /** * * * <pre> * The versions corresponding to an API. 
* </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ @java.lang.Override public com.google.cloud.apihub.v1.Version getVersions(int index) { return versions_.get(index); } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ @java.lang.Override public com.google.cloud.apihub.v1.VersionOrBuilder getVersionsOrBuilder(int index) { return versions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < versions_.size(); i++) { output.writeMessage(1, versions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < versions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, versions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apihub.v1.ListVersionsResponse)) { return super.equals(obj); } com.google.cloud.apihub.v1.ListVersionsResponse other = (com.google.cloud.apihub.v1.ListVersionsResponse) obj; if (!getVersionsList().equals(other.getVersionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getVersionsCount() > 0) { hash = (37 * hash) + VERSIONS_FIELD_NUMBER; hash = (53 * hash) + getVersionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.apihub.v1.ListVersionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apihub.v1.ListVersionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] method's * response. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.ListVersionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.ListVersionsResponse) com.google.cloud.apihub.v1.ListVersionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_ListVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_ListVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.ListVersionsResponse.class, com.google.cloud.apihub.v1.ListVersionsResponse.Builder.class); } // Construct using com.google.cloud.apihub.v1.ListVersionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (versionsBuilder_ == null) { versions_ = java.util.Collections.emptyList(); } else { versions_ = null; versionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_ListVersionsResponse_descriptor; } 
@java.lang.Override public com.google.cloud.apihub.v1.ListVersionsResponse getDefaultInstanceForType() { return com.google.cloud.apihub.v1.ListVersionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apihub.v1.ListVersionsResponse build() { com.google.cloud.apihub.v1.ListVersionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apihub.v1.ListVersionsResponse buildPartial() { com.google.cloud.apihub.v1.ListVersionsResponse result = new com.google.cloud.apihub.v1.ListVersionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.apihub.v1.ListVersionsResponse result) { if (versionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { versions_ = java.util.Collections.unmodifiableList(versions_); bitField0_ = (bitField0_ & ~0x00000001); } result.versions_ = versions_; } else { result.versions_ = versionsBuilder_.build(); } } private void buildPartial0(com.google.cloud.apihub.v1.ListVersionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apihub.v1.ListVersionsResponse) { return mergeFrom((com.google.cloud.apihub.v1.ListVersionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apihub.v1.ListVersionsResponse other) { if (other == com.google.cloud.apihub.v1.ListVersionsResponse.getDefaultInstance()) return this; if (versionsBuilder_ == null) { if (!other.versions_.isEmpty()) { if (versions_.isEmpty()) { versions_ = other.versions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureVersionsIsMutable(); versions_.addAll(other.versions_); } onChanged(); } } else { if (!other.versions_.isEmpty()) { if (versionsBuilder_.isEmpty()) { versionsBuilder_.dispose(); versionsBuilder_ = null; versions_ = other.versions_; bitField0_ = (bitField0_ & ~0x00000001); versionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getVersionsFieldBuilder() : null; } else { versionsBuilder_.addAllMessages(other.versions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.apihub.v1.Version m = input.readMessage( com.google.cloud.apihub.v1.Version.parser(), extensionRegistry); if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(m); } else { versionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.apihub.v1.Version> versions_ = java.util.Collections.emptyList(); private void ensureVersionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { versions_ = new java.util.ArrayList<com.google.cloud.apihub.v1.Version>(versions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Version, com.google.cloud.apihub.v1.Version.Builder, com.google.cloud.apihub.v1.VersionOrBuilder> versionsBuilder_; /** * * * <pre> * The versions 
corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public java.util.List<com.google.cloud.apihub.v1.Version> getVersionsList() { if (versionsBuilder_ == null) { return java.util.Collections.unmodifiableList(versions_); } else { return versionsBuilder_.getMessageList(); } } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public int getVersionsCount() { if (versionsBuilder_ == null) { return versions_.size(); } else { return versionsBuilder_.getCount(); } } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public com.google.cloud.apihub.v1.Version getVersions(int index) { if (versionsBuilder_ == null) { return versions_.get(index); } else { return versionsBuilder_.getMessage(index); } } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder setVersions(int index, com.google.cloud.apihub.v1.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.set(index, value); onChanged(); } else { versionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder setVersions( int index, com.google.cloud.apihub.v1.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.set(index, builderForValue.build()); onChanged(); } else { versionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The versions corresponding to an API. 
* </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder addVersions(com.google.cloud.apihub.v1.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.add(value); onChanged(); } else { versionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder addVersions(int index, com.google.cloud.apihub.v1.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.add(index, value); onChanged(); } else { versionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder addVersions(com.google.cloud.apihub.v1.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(builderForValue.build()); onChanged(); } else { versionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder addVersions( int index, com.google.cloud.apihub.v1.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(index, builderForValue.build()); onChanged(); } else { versionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder addAllVersions( java.lang.Iterable<? 
extends com.google.cloud.apihub.v1.Version> values) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, versions_); onChanged(); } else { versionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder clearVersions() { if (versionsBuilder_ == null) { versions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { versionsBuilder_.clear(); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public Builder removeVersions(int index) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.remove(index); onChanged(); } else { versionsBuilder_.remove(index); } return this; } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public com.google.cloud.apihub.v1.Version.Builder getVersionsBuilder(int index) { return getVersionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public com.google.cloud.apihub.v1.VersionOrBuilder getVersionsOrBuilder(int index) { if (versionsBuilder_ == null) { return versions_.get(index); } else { return versionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public java.util.List<? 
extends com.google.cloud.apihub.v1.VersionOrBuilder> getVersionsOrBuilderList() { if (versionsBuilder_ != null) { return versionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(versions_); } } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public com.google.cloud.apihub.v1.Version.Builder addVersionsBuilder() { return getVersionsFieldBuilder() .addBuilder(com.google.cloud.apihub.v1.Version.getDefaultInstance()); } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public com.google.cloud.apihub.v1.Version.Builder addVersionsBuilder(int index) { return getVersionsFieldBuilder() .addBuilder(index, com.google.cloud.apihub.v1.Version.getDefaultInstance()); } /** * * * <pre> * The versions corresponding to an API. * </pre> * * <code>repeated .google.cloud.apihub.v1.Version versions = 1;</code> */ public java.util.List<com.google.cloud.apihub.v1.Version.Builder> getVersionsBuilderList() { return getVersionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Version, com.google.cloud.apihub.v1.Version.Builder, com.google.cloud.apihub.v1.VersionOrBuilder> getVersionsFieldBuilder() { if (versionsBuilder_ == null) { versionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Version, com.google.cloud.apihub.v1.Version.Builder, com.google.cloud.apihub.v1.VersionOrBuilder>( versions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); versions_ = null; } return versionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.ListVersionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.ListVersionsResponse) private static final com.google.cloud.apihub.v1.ListVersionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.ListVersionsResponse(); } public static com.google.cloud.apihub.v1.ListVersionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListVersionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListVersionsResponse>() { @java.lang.Override public ListVersionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException 
e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListVersionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListVersionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.apihub.v1.ListVersionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite-3
35,192
modules/catalog-dsl/src/integrationTest/java/org/apache/ignite/internal/catalog/ItCatalogDslTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.catalog; import static org.apache.ignite.catalog.ColumnType.INTEGER; import static org.apache.ignite.catalog.ColumnType.VARCHAR; import static org.apache.ignite.catalog.definitions.ColumnDefinition.column; import static org.apache.ignite.internal.TestDefaultProfilesNames.DEFAULT_AIPERSIST_PROFILE_NAME; import static org.apache.ignite.internal.catalog.commands.CatalogUtils.DEFAULT_FILTER; import static org.apache.ignite.internal.testframework.IgniteTestUtils.assertThrows; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.will; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willCompleteSuccessfully; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.UUID; import 
java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import org.apache.ignite.catalog.ColumnSorted; import org.apache.ignite.catalog.ColumnType; import org.apache.ignite.catalog.IgniteCatalog; import org.apache.ignite.catalog.IndexType; import org.apache.ignite.catalog.SortOrder; import org.apache.ignite.catalog.annotations.Id; import org.apache.ignite.catalog.definitions.ColumnDefinition; import org.apache.ignite.catalog.definitions.IndexDefinition; import org.apache.ignite.catalog.definitions.TableDefinition; import org.apache.ignite.catalog.definitions.ZoneDefinition; import org.apache.ignite.internal.ClusterPerClassIntegrationTest; import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode; import org.apache.ignite.internal.matcher.TableDefinitionMatcher; import org.apache.ignite.internal.matcher.ZoneDefinitionMatcher; import org.apache.ignite.sql.SqlException; import org.apache.ignite.table.KeyValueView; import org.apache.ignite.table.QualifiedName; import org.apache.ignite.table.RecordView; import org.apache.ignite.table.Table; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; @SuppressWarnings("ThrowableNotThrown") class ItCatalogDslTest extends ClusterPerClassIntegrationTest { static final String POJO_KV_TABLE_NAME = "POJO_KV_TEST"; static final String POJO_RECORD_TABLE_NAME = "pojo_record_test"; static final String EXPLICIT_QUOTES_TABLE_NAME = "explicit_quotes_test_table"; static final String ZONE_NAME = "ZONE_TEST"; private static final int KEY = 1; private static final PojoKey POJO_KEY = new PojoKey(KEY, String.valueOf(KEY)); private static final PojoValue POJO_VALUE = new PojoValue("fname", "lname", UUID.randomUUID().toString()); private static final Pojo POJO_RECORD = new Pojo(1, "1", "fname", "lname", UUID.randomUUID().toString()); @AfterEach void tearDown() { dropAllTables(); dropAllSchemas(); sql("DROP ZONE IF EXISTS " + 
ZONE_NAME); } @Test void zoneCreateAndDropByDefinition() { // Given zone definition ZoneDefinition zoneDefinition = ZoneDefinition.builder(ZONE_NAME) .distributionAlgorithm("rendezvous") .filter(DEFAULT_FILTER) .storageProfiles(DEFAULT_AIPERSIST_PROFILE_NAME) .build(); // When create zone from definition assertThat(catalog().createZoneAsync(zoneDefinition), willCompleteSuccessfully()); // Then zone was created assertThrows( SqlException.class, () -> sql("CREATE ZONE " + ZONE_NAME + " STORAGE PROFILES ['" + DEFAULT_AIPERSIST_PROFILE_NAME + "']"), "Distribution zone with name '" + ZONE_NAME.toUpperCase() + "' already exists" ); // When drop zone by definition assertThat(catalog().dropZoneAsync(zoneDefinition), willCompleteSuccessfully()); // Then zone was dropped assertThrows( SqlException.class, () -> sql("DROP ZONE " + ZONE_NAME), "Distribution zone with name '" + ZONE_NAME.toUpperCase() + "' not found" ); } @Test void zoneCreateAndDropByName() { // Given zone definition ZoneDefinition zoneDefinition = ZoneDefinition .builder(ZONE_NAME) .storageProfiles(DEFAULT_AIPERSIST_PROFILE_NAME) .build(); // When create zone from definition assertThat(catalog().createZoneAsync(zoneDefinition), willCompleteSuccessfully()); // Then zone was created assertThrows( SqlException.class, () -> sql("CREATE ZONE " + ZONE_NAME + " STORAGE PROFILES ['" + DEFAULT_AIPERSIST_PROFILE_NAME + "']"), "Distribution zone with name '" + ZONE_NAME.toUpperCase() + "' already exists" ); // When drop zone by name assertThat(catalog().dropZoneAsync(ZONE_NAME), willCompleteSuccessfully()); // Then zone was dropped assertThrows( SqlException.class, () -> sql("DROP ZONE " + ZONE_NAME), "Distribution zone with name '" + ZONE_NAME.toUpperCase() + "' not found" ); } @Test void tableCreateAndDropByDefinition() { // Given table definition TableDefinition tableDefinition = TableDefinition.builder(POJO_KV_TABLE_NAME) .columns(column("id", ColumnType.INTEGER)) .primaryKey("id") .build(); // When create table from 
definition assertThat(catalog().createTableAsync(tableDefinition), will(not(nullValue()))); // Then table was created assertThrows( SqlException.class, () -> sql("CREATE TABLE " + POJO_KV_TABLE_NAME + " (id int PRIMARY KEY)"), "Table with name " + toFullTableName(POJO_KV_TABLE_NAME) + " already exists" ); // When drop table by definition assertThat(catalog().dropTableAsync(tableDefinition), willCompleteSuccessfully()); // Then table is dropped assertThrows( SqlException.class, () -> sql("DROP TABLE " + POJO_KV_TABLE_NAME), "Table with name " + toFullTableName(POJO_KV_TABLE_NAME) + " not found" ); } @Test void tableCreateAndDropByDefinitionWithExplicitQuotes() { // Given table definition TableDefinition tableDefinition = TableDefinition.builder(EXPLICIT_QUOTES_TABLE_NAME) .ifNotExists() .columns( column("ID", INTEGER), column("NAME", VARCHAR), column("\"FooBar\"", ColumnType.varchar(20).notNull().defaultValue("a")) ) .primaryKey("id") .index("name", "\"FooBar\"") .build(); // When create table from definition assertThat(catalog().createTableAsync(tableDefinition), will(not(nullValue()))); // Then table was created assertThrows( SqlException.class, () -> sql("CREATE TABLE " + EXPLICIT_QUOTES_TABLE_NAME + " (id int PRIMARY KEY)"), "Table with name " + toFullTableName(EXPLICIT_QUOTES_TABLE_NAME) + " already exists" ); // When drop table by definition assertThat(catalog().dropTableAsync(tableDefinition), willCompleteSuccessfully()); // Then table is dropped assertThrows( SqlException.class, () -> sql("DROP TABLE " + EXPLICIT_QUOTES_TABLE_NAME), "Table with name " + toFullTableName(EXPLICIT_QUOTES_TABLE_NAME) + " not found" ); } @Test void tableCreateAndDropByName() { // Given table definition TableDefinition tableDefinition = TableDefinition.builder(POJO_KV_TABLE_NAME) .columns(column("id", ColumnType.INTEGER)) .primaryKey("id") .build(); // When create table from definition assertThat(catalog().createTableAsync(tableDefinition), will(not(nullValue()))); // Then table 
was created assertThrows( SqlException.class, () -> sql("CREATE TABLE " + POJO_KV_TABLE_NAME + " (id int PRIMARY KEY)"), "Table with name " + toFullTableName(POJO_KV_TABLE_NAME) + " already exists" ); // When drop table by name assertThat(catalog().dropTableAsync(POJO_KV_TABLE_NAME), willCompleteSuccessfully()); // Then table is dropped assertThrows( SqlException.class, () -> sql("DROP TABLE " + POJO_KV_TABLE_NAME), "Table with name " + toFullTableName(POJO_KV_TABLE_NAME) + " not found" ); } @Test void tableCreateAndDropWithQualifiedName() { createTable(); QualifiedName name = QualifiedName.of("PUBLIC", POJO_KV_TABLE_NAME); // When drop table by qualified name with defined schema assertThat(catalog().dropTableAsync(name), willCompleteSuccessfully()); createTable(); name = QualifiedName.parse(POJO_KV_TABLE_NAME); // When drop table by qualified name without schema assertThat(catalog().dropTableAsync(name), willCompleteSuccessfully()); String missedSchemaName = "MISSEDSCHEMA"; QualifiedName nameWithMissedSchema = QualifiedName.of(missedSchemaName, POJO_KV_TABLE_NAME); // When drop table by qualified name with unknown schema then still completed successfully, because // CatalogDsl generates `DROP TABLE IF EXISTS` command assertThat(catalog().dropTableAsync(nameWithMissedSchema), willCompleteSuccessfully()); } private static void createTable() { TableDefinition tableDefinition = TableDefinition.builder(POJO_KV_TABLE_NAME) .columns(column("id", ColumnType.INTEGER)) .primaryKey("id") .build(); catalog().createTable(tableDefinition); } private static String toFullTableName(String tableName) { return "'PUBLIC." 
+ tableName.toUpperCase() + "'"; } @Test void primitiveKeyKvViewFromAnnotation() throws Exception { CompletableFuture<Table> tableFuture = catalog().createTableAsync(Integer.class, PojoValue.class); assertThat(tableFuture, will(not(nullValue()))); KeyValueView<Integer, PojoValue> keyValueView = tableFuture.get() .keyValueView(Integer.class, PojoValue.class); keyValueView.put(null, KEY, POJO_VALUE); assertThat(keyValueView.get(null, KEY), is(POJO_VALUE)); } @Test void pojoKeyKvViewFromAnnotation() throws Exception { CompletableFuture<Table> tableFuture = catalog().createTableAsync(PojoKey.class, PojoValue.class); assertThat(tableFuture, will(not(nullValue()))); KeyValueView<PojoKey, PojoValue> keyValueView = tableFuture.get() .keyValueView(PojoKey.class, PojoValue.class); keyValueView.put(null, POJO_KEY, POJO_VALUE); assertThat(keyValueView.get(null, POJO_KEY), is(POJO_VALUE)); } @Test void primitiveKeyKvViewFromDefinition() throws Exception { TableDefinition definition = TableDefinition.builder(POJO_KV_TABLE_NAME) .key(Integer.class) .value(PojoValue.class) .build(); CompletableFuture<Table> tableFuture = catalog().createTableAsync(definition); assertThat(tableFuture, will(not(nullValue()))); KeyValueView<Integer, PojoValue> keyValueView = tableFuture.get().keyValueView(Integer.class, PojoValue.class); keyValueView.put(null, KEY, POJO_VALUE); assertThat(keyValueView.get(null, KEY), is(POJO_VALUE)); } @Test void pojoKeyKvViewFromDefinition() throws Exception { TableDefinition definition = TableDefinition.builder(POJO_KV_TABLE_NAME) .key(PojoKey.class) .value(PojoValue.class) .build(); CompletableFuture<Table> tableFuture = catalog().createTableAsync(definition); assertThat(tableFuture, will(not(nullValue()))); KeyValueView<PojoKey, PojoValue> keyValueView = tableFuture.get().keyValueView(PojoKey.class, PojoValue.class); keyValueView.put(null, POJO_KEY, POJO_VALUE); assertThat(keyValueView.get(null, POJO_KEY), is(POJO_VALUE)); } @Test void 
pojoRecordViewFromAnnotation() throws Exception { CompletableFuture<Table> tableFuture = catalog().createTableAsync(Pojo.class); assertThat(tableFuture, will(not(nullValue()))); RecordView<Pojo> recordView = tableFuture.get().recordView(Pojo.class); assertThat(recordView.insert(null, POJO_RECORD), is(true)); assertThat(recordView.get(null, POJO_RECORD), is(POJO_RECORD)); } @Test void pojoRecordViewFromDefinition() throws Exception { TableDefinition definition = TableDefinition.builder(POJO_RECORD_TABLE_NAME).record(Pojo.class).build(); CompletableFuture<Table> tableFuture = catalog().createTableAsync(definition); assertThat(tableFuture, will(not(nullValue()))); RecordView<Pojo> recordView = tableFuture.get().recordView(Pojo.class); assertThat(recordView.insert(null, POJO_RECORD), is(true)); assertThat(recordView.get(null, POJO_RECORD), is(POJO_RECORD)); } @Test void createFromAnnotationAndInsertBySql() throws Exception { CompletableFuture<Table> tableFuture = catalog().createTableAsync(Pojo.class); assertThat(tableFuture, will(not(nullValue()))); sql("insert into " + POJO_RECORD_TABLE_NAME + " (id, id_str, f_name, l_name, str) values (1, '1', 'f', 'l', 's')"); List<List<Object>> rows = sql("select * from " + POJO_RECORD_TABLE_NAME); assertThat(rows, contains(List.of(1, "1", "f", "l", "s"))); Pojo pojo = new Pojo(1, "1", "f", "l", "s"); assertThat(tableFuture.get().recordView(Pojo.class).get(null, pojo), is(pojo)); } @Test public void createAndGetDefinitionTest() { ZoneDefinition zoneDefinition = ZoneDefinition .builder(ZONE_NAME) .storageProfiles(DEFAULT_AIPERSIST_PROFILE_NAME) .partitions(3) .replicas(5) .quorumSize(2) .dataNodesAutoAdjustScaleDown(0) .dataNodesAutoAdjustScaleUp(1) .filter("$..*") .distributionAlgorithm("distributionAlgorithm") .consistencyMode(ConsistencyMode.HIGH_AVAILABILITY.name()) .build(); assertThat(catalog().createZoneAsync(zoneDefinition), willCompleteSuccessfully()); ZoneDefinition actual = catalog().zoneDefinition(ZONE_NAME); 
assertThat( actual, ZoneDefinitionMatcher.isZoneDefinition() .withZoneName(zoneDefinition.zoneName()) .withPartitions(zoneDefinition.partitions()) .withReplicas(zoneDefinition.replicas()) .withQuorumSize(zoneDefinition.quorumSize()) .withDataNodesAutoAdjustScaleDown(zoneDefinition.dataNodesAutoAdjustScaleDown()) .withDataNodesAutoAdjustScaleUp(zoneDefinition.dataNodesAutoAdjustScaleUp()) .withFilter(zoneDefinition.filter()) .withConsistencyMode(zoneDefinition.consistencyMode()) // TODO: https://issues.apache.org/jira/browse/IGNITE-22162 // .withDistributionAlgorithm(zoneDefinition.distributionAlgorithm()) ); ColumnDefinition column1 = column("COL1", ColumnType.INT32); ColumnDefinition column2 = column("COL2", ColumnType.INT64); ColumnDefinition column3 = column("COL3", ColumnType.BOOLEAN); ColumnDefinition column4 = column("COL4", ColumnType.VARCHAR); ColumnDefinition column5 = column("COL5", ColumnType.DECIMAL); TableDefinition definition = TableDefinition.builder(POJO_KV_TABLE_NAME) .zone(ZONE_NAME) .columns(List.of(column1, column2, column3, column4, column5)) .primaryKey(IndexType.HASH, ColumnSorted.column(column1.name()), ColumnSorted.column(column3.name())) .index("INDEX_1", IndexType.HASH, ColumnSorted.column(column2.name()), ColumnSorted.column(column5.name())) .colocateBy(column3.name()) .build(); assertThat(catalog().createTableAsync(definition), willCompleteSuccessfully()); assertThat(catalog().tableDefinitionAsync(POJO_KV_TABLE_NAME), willCompleteSuccessfully()); List<Supplier<TableDefinition>> apiCallVariations = List.of( () -> catalog().tableDefinitionAsync(POJO_KV_TABLE_NAME).join(), () -> catalog().tableDefinition(POJO_KV_TABLE_NAME), () -> catalog().tableDefinition(QualifiedName.of("PUBLIC", POJO_KV_TABLE_NAME)), () -> catalog().tableDefinition(QualifiedName.parse(POJO_KV_TABLE_NAME)) ); for (Supplier<TableDefinition> supp : apiCallVariations) { assertThat( supp.get(), TableDefinitionMatcher.isTableDefinition() 
.withTableName(definition.tableName()) .withZoneName(definition.zoneName()) .withColumns(definition.columns()) .withPkType(definition.primaryKeyType()) .withPkColumns(definition.primaryKeyColumns()) .withIndexes(definition.indexes()) .withColocationColumns(definition.colocationColumns()) ); } } @Test public void tableDefinitionWithIndexes() { sql("CREATE TABLE t (id int primary key, col1 varchar, col2 int, \"cOl3\" int)"); sql("CREATE INDEX t_sorted ON t USING SORTED (col2 DESC, col1, \"cOl3\")"); sql("CREATE INDEX t_hash ON t USING HASH (col1, col2)"); sql("CREATE SCHEMA s"); sql("CREATE TABLE s.t (id int primary key, col1 varchar, col2 int)"); { TableDefinition table = catalog().tableDefinition(QualifiedName.of("PUBLIC", "T")); List<IndexDefinition> indexes = table.indexes(); assertNotNull(indexes); Map<String, IndexDefinition> indexMap = indexes.stream() .collect(Collectors.toMap(IndexDefinition::name, Function.identity())); assertEquals(Set.of("T_SORTED", "T_HASH"), indexMap.keySet()); // primary index { assertEquals(IndexType.HASH, table.primaryKeyType()); assertEquals(List.of(ColumnSorted.column("ID")), table.primaryKeyColumns()); } // sorted index { IndexDefinition index = indexMap.get("T_SORTED"); assertEquals(IndexType.SORTED, index.type()); assertEquals(List.of( ColumnSorted.column("COL2", SortOrder.DESC_NULLS_FIRST), ColumnSorted.column("COL1", SortOrder.ASC_NULLS_LAST), ColumnSorted.column("cOl3", SortOrder.ASC_NULLS_LAST) ), index.columns() ); } // hash index { IndexDefinition index = indexMap.get("T_HASH"); assertEquals(IndexType.HASH, index.type()); assertEquals(List.of(ColumnSorted.column("COL1"), ColumnSorted.column("COL2")), index.columns()); } } { TableDefinition table = catalog().tableDefinition(QualifiedName.of("S", "T")); List<IndexDefinition> indexes = table.indexes(); assertNotNull(indexes); Map<String, IndexDefinition> indexMap = indexes.stream() .collect(Collectors.toMap(IndexDefinition::name, Function.identity())); assertEquals(Set.of(), 
indexMap.keySet()); // primary index { assertEquals(IndexType.HASH, table.primaryKeyType()); assertEquals(List.of(ColumnSorted.column("ID")), table.primaryKeyColumns()); } } } @Test public void tableDefinitionWithColocationColumn() { sql("CREATE TABLE t1 (id int, col1 varchar, col2 int, PRIMARY KEY (id))"); { TableDefinition table = catalog().tableDefinition(QualifiedName.of("PUBLIC", "T1")); assertEquals(List.of("ID"), table.primaryKeyColumns().stream() .map(ColumnSorted::columnName) .collect(Collectors.toList()) ); assertEquals(List.of("ID"), table.colocationColumns()); } sql("CREATE TABLE t2 (id int, col1 varchar, col2 int, PRIMARY KEY (col1, col2) ) COLOCATE BY (col1)"); { TableDefinition table = catalog().tableDefinition(QualifiedName.of("PUBLIC", "T2")); assertEquals(List.of("COL1", "COL2"), table.primaryKeyColumns().stream() .map(ColumnSorted::columnName) .collect(Collectors.toList()) ); assertEquals(List.of("COL1"), table.colocationColumns()); } sql("CREATE TABLE t3 (id int, col1 varchar, col2 int, PRIMARY KEY (col1, col2) ) COLOCATE BY (col2, col1)"); { TableDefinition table = catalog().tableDefinition(QualifiedName.of("PUBLIC", "T3")); assertEquals(List.of("COL1", "COL2"), table.primaryKeyColumns().stream() .map(ColumnSorted::columnName) .collect(Collectors.toList()) ); assertEquals(List.of("COL2", "COL1"), table.colocationColumns()); } sql("CREATE TABLE t4 (id int, col1 varchar, col2 int, PRIMARY KEY (col2, col1) ) COLOCATE BY (col1, col2)"); { TableDefinition table = catalog().tableDefinition(QualifiedName.of("PUBLIC", "T4")); assertEquals(List.of("COL2", "COL1"), table.primaryKeyColumns().stream() .map(ColumnSorted::columnName) .collect(Collectors.toList()) ); assertEquals(List.of("COL1", "COL2"), table.colocationColumns()); } } @SuppressWarnings("DataFlowIssue") @Test public void createAllColumnTypesFromPojo() { Table table = catalog().createTable(AllColumnTypesPojo.class); assertEquals("ALLCOLUMNTYPESPOJO", table.qualifiedName().objectName()); 
TableDefinition tableDef = catalog().tableDefinition(table.qualifiedName()); assertEquals(tableDef.tableName(), tableDef.tableName()); List<ColumnDefinition> columns = tableDef.columns(); assertEquals(15, columns.size()); assertEquals("STR", columns.get(0).name()); assertEquals("VARCHAR", columns.get(0).type().typeName()); assertEquals("BYTECOL", columns.get(1).name()); assertEquals("TINYINT", columns.get(1).type().typeName()); assertEquals("SHORTCOL", columns.get(2).name()); assertEquals("SMALLINT", columns.get(2).type().typeName()); assertEquals("INTCOL", columns.get(3).name()); assertEquals("INT", columns.get(3).type().typeName()); assertEquals("LONGCOL", columns.get(4).name()); assertEquals("BIGINT", columns.get(4).type().typeName()); assertEquals("FLOATCOL", columns.get(5).name()); assertEquals("REAL", columns.get(5).type().typeName()); assertEquals("DOUBLECOL", columns.get(6).name()); assertEquals("DOUBLE", columns.get(6).type().typeName()); assertEquals("DECIMALCOL", columns.get(7).name()); assertEquals("DECIMAL", columns.get(7).type().typeName()); assertEquals("BOOLCOL", columns.get(8).name()); assertEquals("BOOLEAN", columns.get(8).type().typeName()); assertEquals("BYTESCOL", columns.get(9).name()); assertEquals("VARBINARY", columns.get(9).type().typeName()); assertEquals("UUIDCOL", columns.get(10).name()); assertEquals("UUID", columns.get(10).type().typeName()); assertEquals("DATECOL", columns.get(11).name()); assertEquals("DATE", columns.get(11).type().typeName()); assertEquals("TIMECOL", columns.get(12).name()); assertEquals("TIME", columns.get(12).type().typeName()); assertEquals("DATETIMECOL", columns.get(13).name()); assertEquals("TIMESTAMP", columns.get(13).type().typeName()); assertEquals("INSTANTCOL", columns.get(14).name()); assertEquals("TIMESTAMP WITH LOCAL TIME ZONE", columns.get(14).type().typeName()); } @Test public void testQuotedZoneName() { IgniteCatalog catalog = CLUSTER.node(0).catalog(); sql("CREATE ZONE \"Some Zone\" STORAGE PROFILES 
['" + DEFAULT_AIPERSIST_PROFILE_NAME + "']"); ZoneDefinition zone = catalog.zoneDefinition("Some Zone"); assertNotNull(zone); assertEquals("Some Zone", zone.zoneName()); } @Test public void testQuotedTableName() { IgniteCatalog catalog = CLUSTER.node(0).catalog(); sql("CREATE SCHEMA \"Table Schema\""); sql("CREATE TABLE \"Table Schema\".\"a b\" (id INT PRIMARY KEY, \"int val\" INT)"); sql("CREATE INDEX \"a b index\" ON \"Table Schema\".\"a b\" (\"int val\")"); QualifiedName name = QualifiedName.parse("\"Table Schema\".\"a b\""); TableDefinition table = catalog.tableDefinition(name); assertNotNull(table); // Table Name assertEquals("\"a b\"", table.tableName()); // Schema name assertEquals("\"Table Schema\"", table.schemaName()); // Column List<ColumnDefinition> columns = table.columns(); assertNotNull(columns); ColumnDefinition col = columns.stream() .filter(c -> "int val".equals(c.name())) .findAny() .orElse(null); assertNotNull(col, "Columns: " + columns.stream().map(ColumnDefinition::name).collect(Collectors.toList())); // Index List<IndexDefinition> indexes = table.indexes(); assertNotNull(indexes); IndexDefinition index = indexes .stream().filter(idx -> Objects.equals(idx.name(), "a b index")) .findAny().orElse(null); assertNotNull(index, "Indexes: " + indexes.stream().map(IndexDefinition::name).collect(Collectors.toList())); catalog.dropTable(table); } @Test public void createDifferentSchemaFromDefinition() { sql("CREATE SCHEMA s"); { TableDefinition def = TableDefinition.builder(POJO_KV_TABLE_NAME) .columns(column("id", INTEGER), column("fname", VARCHAR), column("lname", VARCHAR)) .primaryKey("id") .build(); Table table = catalog().createTable(def); QualifiedName qualifiedName = QualifiedName.of("PUBLIC", POJO_KV_TABLE_NAME); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(def.qualifiedName()); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals(POJO_KV_TABLE_NAME, 
tableDef.tableName()); assertEquals("PUBLIC", tableDef.schemaName()); } { TableDefinition def = TableDefinition.builder(POJO_KV_TABLE_NAME) .schema("s") .columns(column("id", INTEGER), column("fname", VARCHAR), column("lname", VARCHAR)) .primaryKey("id") .build(); Table table = catalog().createTable(def); QualifiedName qualifiedName = QualifiedName.of("S", POJO_KV_TABLE_NAME); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(def.qualifiedName()); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals(POJO_KV_TABLE_NAME, tableDef.tableName()); assertEquals("S", tableDef.schemaName()); } // Quoted names sql("CREATE SCHEMA \"a Schema\""); { TableDefinition def = TableDefinition.builder("A_TABLE") .schema("\"a Schema\"") .columns(column("id", INTEGER), column("fname", VARCHAR), column("lname", VARCHAR)) .primaryKey("id") .build(); Table table = catalog().createTable(def); QualifiedName qualifiedName = QualifiedName.of("\"a Schema\"", "A_TABLE"); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(def.qualifiedName()); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals("A_TABLE", tableDef.tableName()); assertEquals("\"a Schema\"", tableDef.schemaName()); } { TableDefinition def = TableDefinition.builder("\"a tablE\"") .schema("\"a Schema\"") .columns(column("id", INTEGER), column("fname", VARCHAR), column("lname", VARCHAR)) .primaryKey("id") .build(); Table table = catalog().createTable(def); QualifiedName qualifiedName = QualifiedName.of("\"a Schema\"", "\"a tablE\""); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(def.qualifiedName()); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals("\"a tablE\"", tableDef.tableName()); assertEquals("\"a Schema\"", tableDef.schemaName()); } } @Test public void createDifferentSchemaFromAnnotation() { sql("CREATE SCHEMA 
s"); { Table table = catalog().createTable(PojoClass1.class); QualifiedName qualifiedName = QualifiedName.of("PUBLIC", POJO_KV_TABLE_NAME); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition def = catalog().tableDefinition(qualifiedName); assertEquals(qualifiedName, def.qualifiedName()); assertEquals(POJO_KV_TABLE_NAME, def.tableName()); assertEquals("PUBLIC", def.schemaName()); } { Table table = catalog().createTable(PojoClass2.class); QualifiedName qualifiedName = QualifiedName.of("S", POJO_KV_TABLE_NAME); assertEquals(qualifiedName, table.qualifiedName()); assertEquals(qualifiedName, catalog().tableDefinition(qualifiedName).qualifiedName()); TableDefinition def = catalog().tableDefinition(qualifiedName); assertEquals(qualifiedName, def.qualifiedName()); assertEquals(POJO_KV_TABLE_NAME, def.tableName()); assertEquals("S", def.schemaName()); } // Quoted names sql("CREATE SCHEMA \"a Schema\""); { Table table = catalog().createTable(PojoClass3.class); QualifiedName qualifiedName = QualifiedName.of("\"a Schema\"", "A_TABLE"); assertEquals(qualifiedName, table.qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(qualifiedName); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals("A_TABLE", tableDef.tableName()); assertEquals("\"a Schema\"", tableDef.schemaName()); } { Table table = catalog().createTable(PojoClass4.class); QualifiedName qualifiedName = QualifiedName.of("\"a Schema\"", "\"a tablE\""); assertEquals(qualifiedName, table.qualifiedName()); assertEquals(qualifiedName, catalog().tableDefinition(qualifiedName).qualifiedName()); TableDefinition tableDef = catalog().tableDefinition(qualifiedName); assertEquals(qualifiedName, tableDef.qualifiedName()); assertEquals("\"a tablE\"", tableDef.tableName()); assertEquals("\"a Schema\"", tableDef.schemaName()); } } private static IgniteCatalog catalog() { return CLUSTER.node(0).catalog(); } @org.apache.ignite.catalog.annotations.Table(POJO_KV_TABLE_NAME) private static 
class PojoClass1 { @Id @SuppressWarnings("unused") Integer id; @SuppressWarnings("unused") String fname; @SuppressWarnings("unused") String lname; } @org.apache.ignite.catalog.annotations.Table( value = POJO_KV_TABLE_NAME, schemaName = "S" ) private static class PojoClass2 { @Id @SuppressWarnings("unused") Integer id; @SuppressWarnings("unused") String fname; @SuppressWarnings("unused") String lname; } @org.apache.ignite.catalog.annotations.Table( value = "a_table", schemaName = "\"a Schema\"" ) private static class PojoClass3 { @Id @SuppressWarnings("unused") Integer id; @SuppressWarnings("unused") String fname; @SuppressWarnings("unused") String lname; } @org.apache.ignite.catalog.annotations.Table( value = "\"a tablE\"", schemaName = "\"a Schema\"" ) private static class PojoClass4 { @Id @SuppressWarnings("unused") Integer id; @SuppressWarnings("unused") String fname; @SuppressWarnings("unused") String lname; } }
apache/lucene
35,548
lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.codecs.blocktreeords; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.lucene.codecs.BlockTermState; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldsConsumer; import org.apache.lucene.codecs.NormsProducer; import org.apache.lucene.codecs.PostingsWriterBase; import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output; import org.apache.lucene.codecs.lucene103.blocktree.Lucene103BlockTreeTermsWriter; // javadocs import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.IOUtils; import 
org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.ToStringUtils; import org.apache.lucene.util.fst.BytesRefFSTEnum; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.Util; /* TODO: - Currently there is a one-to-one mapping of indexed term to term block, but we could decouple the two, ie, put more terms into the index than there are blocks. The index would take up more RAM but then it'd be able to avoid seeking more often and could make PK/FuzzyQ faster if the additional indexed terms could store the offset into the terms block. - The blocks are not written in true depth-first order, meaning if you just next() the file pointer will sometimes jump backwards. For example, block foo* will be written before block f* because it finished before. This could possibly hurt performance if the terms dict is not hot, since OSs anticipate sequential file access. We could fix the writer to re-order the blocks as a 2nd pass. - Each block encodes the term suffixes packed sequentially using a separate vInt per term, which is 1) wasteful and 2) slow (must linear scan to find a particular suffix). We should instead 1) make random-access array so we can directly access the Nth suffix, and 2) bulk-encode this array using bulk int[] codecs; then at search time we can binary search when we seek a particular term. */ /** * This is just like {@link Lucene103BlockTreeTermsWriter}, except it also stores a version per * term, and adds a method to its TermsEnum implementation to seekExact only if the version is &gt;= * the specified version. The version is added to the terms index to avoid seeking if no term in the * block has a high enough version. The term blocks file is .tiv and the terms index extension is * .tipv. 
* * @lucene.experimental */ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer { static final FSTOrdsOutputs FST_OUTPUTS = new FSTOrdsOutputs(); static final Output NO_OUTPUT = FST_OUTPUTS.getNoOutput(); /** * Suggested default value for the {@code minItemsInBlock} parameter to {@link * #OrdsBlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int)}. */ public static final int DEFAULT_MIN_BLOCK_SIZE = 25; /** * Suggested default value for the {@code maxItemsInBlock} parameter to {@link * #OrdsBlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int)}. */ public static final int DEFAULT_MAX_BLOCK_SIZE = 48; // public final static boolean DEBUG = false; // private final static boolean SAVE_DOT_FILES = false; static final int OUTPUT_FLAGS_NUM_BITS = 2; static final int OUTPUT_FLAGS_MASK = 0x3; static final int OUTPUT_FLAG_IS_FLOOR = 0x1; static final int OUTPUT_FLAG_HAS_TERMS = 0x2; /** Extension of terms file */ static final String TERMS_EXTENSION = "tio"; static final String TERMS_CODEC_NAME = "OrdsBlockTreeTerms"; /** Initial terms format. */ public static final int VERSION_START = 1; /** Current terms format. 
*/ public static final int VERSION_CURRENT = VERSION_START; /** Extension of terms index file */ static final String TERMS_INDEX_EXTENSION = "tipo"; static final String TERMS_INDEX_CODEC_NAME = "OrdsBlockTreeIndex"; private final IndexOutput out; private final IndexOutput indexOut; final int maxDoc; final int minItemsInBlock; final int maxItemsInBlock; final PostingsWriterBase postingsWriter; final FieldInfos fieldInfos; private record FieldMetaData( FieldInfo fieldInfo, Output rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, BytesRef minTerm, BytesRef maxTerm) { private FieldMetaData { assert numTerms > 0; assert rootCode != null : "field=" + fieldInfo.name + " numTerms=" + numTerms; } } private final List<FieldMetaData> fields = new ArrayList<>(); // private final String segment; /** * Create a new writer. The number of items (terms or sub-blocks) per block will aim to be between * minItemsPerBlock and maxItemsPerBlock, though in some cases the blocks may be smaller than the * min. 
*/ public OrdsBlockTreeTermsWriter( SegmentWriteState state, PostingsWriterBase postingsWriter, int minItemsInBlock, int maxItemsInBlock) throws IOException { Lucene103BlockTreeTermsWriter.validateSettings(minItemsInBlock, maxItemsInBlock); maxDoc = state.segmentInfo.maxDoc(); final String termsFileName = IndexFileNames.segmentFileName( state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION); out = state.directory.createOutput(termsFileName, state.context); IndexOutput indexOut = null; try { fieldInfos = state.fieldInfos; this.minItemsInBlock = minItemsInBlock; this.maxItemsInBlock = maxItemsInBlock; CodecUtil.writeIndexHeader( out, TERMS_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); final String termsIndexFileName = IndexFileNames.segmentFileName( state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); indexOut = state.directory.createOutput(termsIndexFileName, state.context); CodecUtil.writeIndexHeader( indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); this.postingsWriter = postingsWriter; // segment = state.segmentInfo.name; // System.out.println("BTW.init seg=" + state.segmentName); postingsWriter.init(out, state); // have consumer write its format/header } catch (Throwable t) { IOUtils.closeWhileSuppressingExceptions(t, out, indexOut); throw t; } this.indexOut = indexOut; } @Override public void write(Fields fields, NormsProducer norms) throws IOException { String lastField = null; for (String field : fields) { assert lastField == null || lastField.compareTo(field) < 0; lastField = field; Terms terms = fields.terms(field); if (terms == null) { continue; } TermsEnum termsEnum = terms.iterator(); TermsWriter termsWriter = new TermsWriter(fieldInfos.fieldInfo(field)); while (true) { BytesRef term = termsEnum.next(); if (term == null) { break; } termsWriter.write(term, termsEnum, norms); } termsWriter.finish(); } } static long encodeOutput(long fp, boolean 
hasTerms, boolean isFloor) { assert fp < (1L << 62); return (fp << 2) | (hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } private static class PendingEntry { public final boolean isTerm; protected PendingEntry(boolean isTerm) { this.isTerm = isTerm; } } private static final class PendingTerm extends PendingEntry { public final byte[] termBytes; // stats + metadata public final BlockTermState state; public PendingTerm(BytesRef term, BlockTermState state) { super(true); this.termBytes = new byte[term.length]; System.arraycopy(term.bytes, term.offset, termBytes, 0, term.length); this.state = state; } @Override public String toString() { return ToStringUtils.bytesRefToString(termBytes); } } private record SubIndex(FST<Output> index, long termOrdStart) {} private static final class PendingBlock extends PendingEntry { public final BytesRef prefix; public final long fp; public FST<Output> index; public List<SubIndex> subIndices; public final boolean hasTerms; public final boolean isFloor; public final int floorLeadByte; public long totFloorTermCount; private final long totalTermCount; public PendingBlock( BytesRef prefix, long fp, boolean hasTerms, long totalTermCount, boolean isFloor, int floorLeadByte, List<SubIndex> subIndices) { super(false); this.prefix = prefix; this.fp = fp; this.hasTerms = hasTerms; this.totalTermCount = totalTermCount; assert totalTermCount > 0; this.isFloor = isFloor; this.floorLeadByte = floorLeadByte; this.subIndices = subIndices; } @Override public String toString() { return "BLOCK: " + ToStringUtils.bytesRefToString(prefix); } public void compileIndex( List<PendingBlock> blocks, ByteBuffersDataOutput scratchBytes, IntsRefBuilder scratchIntsRef) throws IOException { assert (isFloor && blocks.size() > 1) || (isFloor == false && blocks.size() == 1) : "isFloor=" + isFloor + " blocks=" + blocks; assert this == blocks.get(0); assert scratchBytes.size() == 0; // TODO: try writing the leading vLong in MSB order // 
(opposite of what Lucene does today), for better // outputs sharing in the FST long lastSumTotalTermCount = 0; long sumTotalTermCount = totalTermCount; scratchBytes.writeVLong(encodeOutput(fp, hasTerms, isFloor)); if (isFloor) { scratchBytes.writeVInt(blocks.size() - 1); for (int i = 1; i < blocks.size(); i++) { PendingBlock sub = blocks.get(i); assert sub.floorLeadByte != -1; // if (DEBUG) { // System.out.println(" write floorLeadByte=" + // Integer.toHexString(sub.floorLeadByte&0xff)); // } scratchBytes.writeByte((byte) sub.floorLeadByte); // System.out.println(" write floor byte=" + (byte) sub.floorLeadByte + " ordShift=" + // sumTotalTermCount); scratchBytes.writeVLong(sumTotalTermCount - lastSumTotalTermCount); lastSumTotalTermCount = sumTotalTermCount; sumTotalTermCount += sub.totalTermCount; assert sub.fp > fp; scratchBytes.writeVLong((sub.fp - fp) << 1 | (sub.hasTerms ? 1 : 0)); } } final FSTCompiler<Output> fstCompiler = new FSTCompiler.Builder<>(FST.INPUT_TYPE.BYTE1, FST_OUTPUTS).build(); // if (DEBUG) { // System.out.println(" compile index for prefix=" + prefix); // } // indexBuilder.DEBUG = false; final byte[] bytes = scratchBytes.toArrayCopy(); assert bytes.length > 0; fstCompiler.add( Util.toIntsRef(prefix, scratchIntsRef), FST_OUTPUTS.newOutput( new BytesRef(bytes, 0, bytes.length), 0, Long.MAX_VALUE - (sumTotalTermCount - 1))); scratchBytes.reset(); // Copy over index for all sub-blocks long termOrdOffset = 0; for (PendingBlock block : blocks) { if (block.subIndices != null) { for (SubIndex subIndex : block.subIndices) { append( fstCompiler, subIndex.index, termOrdOffset + subIndex.termOrdStart, scratchIntsRef); } block.subIndices = null; } termOrdOffset += block.totalTermCount; } totFloorTermCount = termOrdOffset; assert sumTotalTermCount == totFloorTermCount; index = FST.fromFSTReader(fstCompiler.compile(), fstCompiler.getFSTReader()); assert subIndices == null; /* Writer w = new OutputStreamWriter(new FileOutputStream("out.dot")); 
Util.toDot(index, w, false, false); System.out.println("SAVED to out.dot"); w.close(); */ } // TODO: maybe we could add bulk-add method to // Builder? Takes FST and unions it w/ current // FST. private void append( FSTCompiler<Output> fstCompiler, FST<Output> subIndex, long termOrdOffset, IntsRefBuilder scratchIntsRef) throws IOException { final BytesRefFSTEnum<Output> subIndexEnum = new BytesRefFSTEnum<>(subIndex); BytesRefFSTEnum.InputOutput<Output> indexEnt; while ((indexEnt = subIndexEnum.next()) != null) { // if (DEBUG) { // System.out.println(" add sub=" + indexEnt.input + " " + indexEnt.input + " output=" // + indexEnt.output); // } Output output = indexEnt.output; // long blockTermCount = output.endOrd - output.startOrd + 1; Output newOutput = FST_OUTPUTS.newOutput( output.bytes(), termOrdOffset + output.startOrd(), output.endOrd() - termOrdOffset); // System.out.println(" append sub=" + indexEnt.input + " output=" + indexEnt.output + // " termOrdOffset=" + termOrdOffset + " blockTermCount=" + blockTermCount + " newOutput=" // + newOutput + " endOrd=" + (termOrdOffset+Long.MAX_VALUE-output.endOrd)); fstCompiler.add(Util.toIntsRef(indexEnt.input, scratchIntsRef), newOutput); } } } private final ByteBuffersDataOutput scratchBytes = ByteBuffersDataOutput.newResettableInstance(); private final IntsRefBuilder scratchIntsRef = new IntsRefBuilder(); class TermsWriter { private final FieldInfo fieldInfo; private long numTerms; final FixedBitSet docsSeen; long sumTotalTermFreq; long sumDocFreq; long indexStartFP; // Records index into pending where the current prefix at that // length "started"; for example, if current term starts with 't', // startsByPrefix[0] is the index into pending for the first // term/sub-block starting with 't'. We use this to figure out when // to write a new block: private final BytesRefBuilder lastTerm = new BytesRefBuilder(); private int[] prefixStarts = new int[8]; // Pending stack of terms and blocks. 
As terms arrive (in sorted order) // we append to this stack, and once the top of the stack has enough // terms starting with a common prefix, we write a new block with // those terms and replace those terms in the stack with a new block: private final List<PendingEntry> pending = new ArrayList<>(); // Reused in writeBlocks: private final List<PendingBlock> newBlocks = new ArrayList<>(); private PendingTerm firstPendingTerm; private PendingTerm lastPendingTerm; /** Writes the top count entries in pending, using prevTerm to compute the prefix. */ void writeBlocks(int prefixLength, int count) throws IOException { assert count > 0; /* if (DEBUG) { BytesRef br = new BytesRef(lastTerm.bytes); br.offset = lastTerm.offset; br.length = prefixLength; System.out.println("writeBlocks: " + br.utf8ToString() + " count=" + count); } */ // Root block better write all remaining pending entries: assert prefixLength > 0 || count == pending.size(); int lastSuffixLeadLabel = -1; // True if we saw at least one term in this block (we record if a block // only points to sub-blocks in the terms index so we can avoid seeking // to it when we are looking for a term): boolean hasTerms = false; boolean hasSubBlocks = false; int start = pending.size() - count; int end = pending.size(); int nextBlockStart = start; int nextFloorLeadLabel = -1; for (int i = start; i < end; i++) { PendingEntry ent = pending.get(i); int suffixLeadLabel; if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; if (term.termBytes.length == prefixLength) { // Suffix is 0, i.e. 
prefix 'foo' and term is // 'foo' so the term has empty string suffix // in this block assert lastSuffixLeadLabel == -1; suffixLeadLabel = -1; } else { suffixLeadLabel = term.termBytes[prefixLength] & 0xff; } } else { PendingBlock block = (PendingBlock) ent; assert block.prefix.length > prefixLength; suffixLeadLabel = block.prefix.bytes[block.prefix.offset + prefixLength] & 0xff; } // if (DEBUG) System.out.println(" i=" + i + " ent=" + ent + " suffixLeadLabel=" + // suffixLeadLabel); if (suffixLeadLabel != lastSuffixLeadLabel) { int itemsInBlock = i - nextBlockStart; if (itemsInBlock >= minItemsInBlock && end - nextBlockStart > maxItemsInBlock) { // The count is too large for one block, so we must break it into "floor" blocks, where // we record // the leading label of the suffix of the first term in each floor block, so at search // time we can // jump to the right floor block. We just use a naive greedy segmenter here: make a new // floor // block as soon as we have at least minItemsInBlock. 
This is not always best: it often // produces // a too-small block as the final block: boolean isFloor = itemsInBlock < count; newBlocks.add( writeBlock( prefixLength, isFloor, nextFloorLeadLabel, nextBlockStart, i, hasTerms, hasSubBlocks)); hasTerms = false; hasSubBlocks = false; nextFloorLeadLabel = suffixLeadLabel; nextBlockStart = i; } lastSuffixLeadLabel = suffixLeadLabel; } if (ent.isTerm) { hasTerms = true; } else { hasSubBlocks = true; } } // Write last block, if any: if (nextBlockStart < end) { int itemsInBlock = end - nextBlockStart; boolean isFloor = itemsInBlock < count; newBlocks.add( writeBlock( prefixLength, isFloor, nextFloorLeadLabel, nextBlockStart, end, hasTerms, hasSubBlocks)); } assert newBlocks.isEmpty() == false; PendingBlock firstBlock = newBlocks.get(0); assert firstBlock.isFloor || newBlocks.size() == 1; firstBlock.compileIndex(newBlocks, scratchBytes, scratchIntsRef); // Remove slice from the top of the pending stack, that we just wrote: pending.subList(pending.size() - count, pending.size()).clear(); // Append new block pending.add(firstBlock); newBlocks.clear(); } /** * Writes the specified slice (start is inclusive, end is exclusive) from pending stack as a new * block. If isFloor is true, there were too many (more than maxItemsInBlock) entries sharing * the same prefix, and so we broke it into multiple floor blocks where we record the starting * label of the suffix of each floor block. 
*/ private PendingBlock writeBlock( int prefixLength, boolean isFloor, int floorLeadLabel, int start, int end, boolean hasTerms, boolean hasSubBlocks) throws IOException { assert end > start; long startFP = out.getFilePointer(); // if (DEBUG) System.out.println(" writeBlock fp=" + startFP + " isFloor=" + isFloor + // " floorLeadLabel=" + floorLeadLabel + " start=" + start + " end=" + end + " hasTerms=" + // hasTerms + " hasSubBlocks=" + hasSubBlocks); boolean hasFloorLeadLabel = isFloor && floorLeadLabel != -1; final BytesRef prefix = new BytesRef(prefixLength + (hasFloorLeadLabel ? 1 : 0)); System.arraycopy(lastTerm.bytes(), 0, prefix.bytes, 0, prefixLength); prefix.length = prefixLength; // Write block header: int numEntries = end - start; int code = numEntries << 1; if (end == pending.size()) { // Last block: code |= 1; } out.writeVInt(code); // if (DEBUG) { // System.out.println(" writeBlock " + (isFloor ? "(floor) " : "") + "seg=" + segment + // " pending.size()=" + pending.size() + " prefixLength=" + prefixLength + " indexPrefix=" + // ToStringUtils.bytesRefToString(prefix) + " entCount=" + length + " startFP=" + startFP + // (isFloor ? 
(" floorLeadByte=" + Integer.toHexString(floorLeadByte&0xff)) : "") + // " isLastInFloor=" + isLastInFloor); // } final List<SubIndex> subIndices; // We optimize the leaf block case (block has only terms), writing a more // compact format in this case: boolean isLeafBlock = hasSubBlocks == false; // Number of terms in this block and all sub-blocks (recursively) long totalTermCount; boolean absolute = true; if (isLeafBlock) { // Only terms: subIndices = null; for (int i = start; i < end; i++) { PendingEntry ent = pending.get(i); assert ent.isTerm : "i=" + i; PendingTerm term = (PendingTerm) ent; assert StringHelper.startsWith(term.termBytes, prefix) : term + " prefix=" + prefix; BlockTermState state = term.state; final int suffix = term.termBytes.length - prefixLength; /* if (DEBUG) { BytesRef suffixBytes = new BytesRef(suffix); System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); suffixBytes.length = suffix; System.out.println(" write term suffix=" + suffixBytes); } */ // For leaf block we write suffix straight suffixWriter.writeVInt(suffix); suffixWriter.writeBytes(term.termBytes, prefixLength, suffix); assert floorLeadLabel == -1 || (term.termBytes[prefixLength] & 0xff) >= floorLeadLabel; // Write term stats, to separate byte[] blob: statsWriter.writeVInt(state.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) { assert state.totalTermFreq >= state.docFreq : state.totalTermFreq + " vs " + state.docFreq; statsWriter.writeVLong(state.totalTermFreq - state.docFreq); } // Write term meta data postingsWriter.encodeTerm(metaWriter, fieldInfo, state, absolute); absolute = false; } totalTermCount = end - start; } else { // Mixed terms and sub-blocks: subIndices = new ArrayList<>(); totalTermCount = 0; for (int i = start; i < end; i++) { PendingEntry ent = pending.get(i); if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; assert StringHelper.startsWith(term.termBytes, prefix) : term + " prefix=" + prefix; BlockTermState state = 
term.state; final int suffix = term.termBytes.length - prefixLength; /* if (DEBUG) { BytesRef suffixBytes = new BytesRef(suffix); System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); suffixBytes.length = suffix; System.out.println(" write term suffix=" + suffixBytes); } */ // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block suffixWriter.writeVInt(suffix << 1); suffixWriter.writeBytes(term.termBytes, prefixLength, suffix); assert floorLeadLabel == -1 || (term.termBytes[prefixLength] & 0xff) >= floorLeadLabel; // Write term stats, to separate byte[] blob: statsWriter.writeVInt(state.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) { assert state.totalTermFreq >= state.docFreq; statsWriter.writeVLong(state.totalTermFreq - state.docFreq); } // TODO: now that terms dict "sees" these longs, // we can explore better column-stride encodings // to encode all long[0]s for this block at // once, all long[1]s, etc., e.g. using // Simple64. Alternatively, we could interleave // stats + meta ... 
no reason to have them // separate anymore: // Write term meta data postingsWriter.encodeTerm(metaWriter, fieldInfo, state, absolute); absolute = false; totalTermCount++; } else { PendingBlock block = (PendingBlock) ent; assert StringHelper.startsWith(block.prefix, prefix); final int suffix = block.prefix.length - prefixLength; assert suffix > 0; // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block suffixWriter.writeVInt((suffix << 1) | 1); suffixWriter.writeBytes(block.prefix.bytes, prefixLength, suffix); assert floorLeadLabel == -1 || (block.prefix.bytes[prefixLength] & 0xff) >= floorLeadLabel; assert block.fp < startFP; /* if (DEBUG) { BytesRef suffixBytes = new BytesRef(suffix); System.arraycopy(block.prefix.bytes, prefixLength, suffixBytes.bytes, 0, suffix); suffixBytes.length = suffix; System.out.println(" write sub-block suffix=" + ToStringUtils.bytesRefToString(suffixBytes) + " subFP=" + block.fp + " subCode=" + (startFP-block.fp) + " floor=" + block.isFloor); } */ suffixWriter.writeVLong(startFP - block.fp); suffixWriter.writeVLong(block.totFloorTermCount); subIndices.add(new SubIndex(block.index, totalTermCount)); totalTermCount += block.totFloorTermCount; } } assert subIndices.size() != 0; } // TODO: we could block-write the term suffix pointers; // this would take more space but would enable binary // search on lookup // Write suffixes byte[] blob to terms dict output: out.writeVInt((int) (suffixWriter.size() << 1) | (isLeafBlock ? 
1 : 0)); suffixWriter.copyTo(out); suffixWriter.reset(); // Write term stats byte[] blob out.writeVInt((int) statsWriter.size()); statsWriter.copyTo(out); statsWriter.reset(); // Write term meta data byte[] blob out.writeVInt((int) metaWriter.size()); metaWriter.copyTo(out); metaWriter.reset(); // if (DEBUG) { // System.out.println(" fpEnd=" + out.getFilePointer()); // } if (hasFloorLeadLabel) { // We already allocated to length+1 above: prefix.bytes[prefix.length++] = (byte) floorLeadLabel; } return new PendingBlock( prefix, startFP, hasTerms, totalTermCount, isFloor, floorLeadLabel, subIndices); } TermsWriter(FieldInfo fieldInfo) { this.fieldInfo = fieldInfo; docsSeen = new FixedBitSet(maxDoc); postingsWriter.setField(fieldInfo); } /** Writes one term's worth of postings. */ public void write(BytesRef text, TermsEnum termsEnum, NormsProducer norms) throws IOException { /* if (DEBUG) { int[] tmp = new int[lastTerm.length]; System.arraycopy(prefixStarts, 0, tmp, 0, tmp.length); System.out.println("BTTW: write term=" + ToStringUtils.bytesRefToString(text) + " prefixStarts=" + Arrays.toString(tmp) + " pending.size()=" + pending.size()); } */ BlockTermState state = postingsWriter.writeTerm(text, termsEnum, docsSeen, norms); if (state != null) { assert state.docFreq != 0; assert fieldInfo.getIndexOptions() == IndexOptions.DOCS || state.totalTermFreq >= state.docFreq : "postingsWriter=" + postingsWriter; sumDocFreq += state.docFreq; sumTotalTermFreq += state.totalTermFreq; pushTerm(text); PendingTerm term = new PendingTerm(BytesRef.deepCopyOf(text), state); pending.add(term); numTerms++; if (firstPendingTerm == null) { firstPendingTerm = term; } lastPendingTerm = term; } } /** Pushes the new term to the top of the stack, and writes new blocks. 
*/ private void pushTerm(BytesRef text) throws IOException { int limit = Math.min(lastTerm.length(), text.length); // Find common prefix between last term and current term: int pos = 0; while (pos < limit && lastTerm.byteAt(pos) == text.bytes[text.offset + pos]) { pos++; } // if (DEBUG) System.out.println(" shared=" + pos + " lastTerm.length=" + lastTerm.length); // Close the "abandoned" suffix now: for (int i = lastTerm.length() - 1; i >= pos; i--) { // How many items on top of the stack share the current suffix // we are closing: int prefixTopSize = pending.size() - prefixStarts[i]; if (prefixTopSize >= minItemsInBlock) { // if (DEBUG) System.out.println("pushTerm i=" + i + " prefixTopSize=" + prefixTopSize + // " minItemsInBlock=" + minItemsInBlock); writeBlocks(i + 1, prefixTopSize); prefixStarts[i] -= prefixTopSize - 1; } } if (prefixStarts.length < text.length) { prefixStarts = ArrayUtil.grow(prefixStarts, text.length); } // Init new tail: for (int i = pos; i < text.length; i++) { prefixStarts[i] = pending.size(); } lastTerm.copyBytes(text); } // Finishes all terms in this field public void finish() throws IOException { if (numTerms > 0) { // if (DEBUG) System.out.println("BTTW.finish pending.size()=" + pending.size()); // TODO: if pending.size() is already 1 with a non-zero prefix length // we can save writing a "degenerate" root block, but we have to // fix all the places that assume the root block's prefix is the empty string: writeBlocks(0, pending.size()); // We better have one final "root" block: assert pending.size() == 1 && !pending.get(0).isTerm : "pending.size()=" + pending.size() + " pending=" + pending; final PendingBlock root = (PendingBlock) pending.get(0); assert root.prefix.length == 0; assert root.index.getEmptyOutput() != null; // Write FST to index indexStartFP = indexOut.getFilePointer(); root.index.save(indexOut, indexOut); // System.out.println(" write FST " + indexStartFP + " field=" + fieldInfo.name); // if (SAVE_DOT_FILES || DEBUG) { 
// final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; // Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName)); // Util.toDot(root.index, w, false, false); // System.out.println("SAVED to " + dotFileName); // w.close(); // } assert firstPendingTerm != null; BytesRef minTerm = new BytesRef(firstPendingTerm.termBytes); assert lastPendingTerm != null; BytesRef maxTerm = new BytesRef(lastPendingTerm.termBytes); fields.add( new FieldMetaData( fieldInfo, ((PendingBlock) pending.get(0)).index.getEmptyOutput(), numTerms, indexStartFP, sumTotalTermFreq, sumDocFreq, docsSeen.cardinality(), minTerm, maxTerm)); } else { assert docsSeen.cardinality() == 0; } } private final ByteBuffersDataOutput suffixWriter = ByteBuffersDataOutput.newResettableInstance(); private final ByteBuffersDataOutput statsWriter = ByteBuffersDataOutput.newResettableInstance(); private final ByteBuffersDataOutput metaWriter = ByteBuffersDataOutput.newResettableInstance(); } private boolean closed; @Override public void close() throws IOException { if (closed) { return; } closed = true; try (out; indexOut; postingsWriter) { final long dirStart = out.getFilePointer(); final long indexDirStart = indexOut.getFilePointer(); out.writeVInt(fields.size()); for (FieldMetaData field : fields) { // System.out.println(" field " + field.fieldInfo.name + " " + field.numTerms + " terms // longsSize=" + field.longsSize); out.writeVInt(field.fieldInfo.number); assert field.numTerms > 0; out.writeVLong(field.numTerms); out.writeVInt(field.rootCode.bytes().length); out.writeBytes( field.rootCode.bytes().bytes, field.rootCode.bytes().offset, field.rootCode.bytes().length); if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) { out.writeVLong(field.sumTotalTermFreq); } out.writeVLong(field.sumDocFreq); out.writeVInt(field.docCount); indexOut.writeVLong(field.indexStartFP); writeBytesRef(out, field.minTerm); writeBytesRef(out, field.maxTerm); } out.writeLong(dirStart); 
CodecUtil.writeFooter(out); indexOut.writeLong(indexDirStart); CodecUtil.writeFooter(indexOut); } } private static void writeBytesRef(IndexOutput out, BytesRef bytes) throws IOException { out.writeVInt(bytes.length); out.writeBytes(bytes.bytes, bytes.offset, bytes.length); } }
apache/samza
35,901
samza-core/src/main/java/org/apache/samza/storage/blobstore/util/BlobStoreUtil.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.storage.blobstore.util; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutorService; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.zip.CRC32; import java.util.zip.CheckedInputStream; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.samza.SamzaException; import 
org.apache.samza.checkpoint.Checkpoint; import org.apache.samza.checkpoint.CheckpointV2; import org.apache.samza.config.BlobStoreConfig; import org.apache.samza.storage.blobstore.BlobStoreManager; import org.apache.samza.storage.blobstore.BlobStoreStateBackendFactory; import org.apache.samza.storage.blobstore.Metadata; import org.apache.samza.storage.blobstore.diff.DirDiff; import org.apache.samza.storage.blobstore.exceptions.DeletedException; import org.apache.samza.storage.blobstore.exceptions.RetriableException; import org.apache.samza.storage.blobstore.index.DirIndex; import org.apache.samza.storage.blobstore.index.FileBlob; import org.apache.samza.storage.blobstore.index.FileIndex; import org.apache.samza.storage.blobstore.index.FileMetadata; import org.apache.samza.storage.blobstore.index.SnapshotIndex; import org.apache.samza.storage.blobstore.index.SnapshotMetadata; import org.apache.samza.storage.blobstore.index.serde.SnapshotIndexSerde; import org.apache.samza.storage.blobstore.metrics.BlobStoreBackupManagerMetrics; import org.apache.samza.storage.blobstore.metrics.BlobStoreRestoreManagerMetrics; import org.apache.samza.util.FutureUtil; import org.apache.samza.util.RetryPolicyConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Helper methods to interact with remote blob store service and GET/PUT/DELETE a * {@link SnapshotIndex} or {@link DirDiff}. 
*/ public class BlobStoreUtil { private static final Logger LOG = LoggerFactory.getLogger(BlobStoreUtil.class); private final BlobStoreManager blobStoreManager; private final ExecutorService executor; private final BlobStoreConfig blobStoreConfig; private final BlobStoreBackupManagerMetrics backupMetrics; private final BlobStoreRestoreManagerMetrics restoreMetrics; private final SnapshotIndexSerde snapshotIndexSerde; private final RetryPolicyConfig retryPolicyConfig; public BlobStoreUtil(BlobStoreManager blobStoreManager, ExecutorService executor, BlobStoreConfig blobStoreConfig, BlobStoreBackupManagerMetrics backupMetrics, BlobStoreRestoreManagerMetrics restoreMetrics) { this.blobStoreManager = blobStoreManager; this.executor = executor; this.blobStoreConfig = blobStoreConfig; this.backupMetrics = backupMetrics; this.restoreMetrics = restoreMetrics; this.snapshotIndexSerde = new SnapshotIndexSerde(); this.retryPolicyConfig = this.blobStoreConfig.getRetryPolicyConfig(); } /** * Get the blob id of {@link SnapshotIndex} and {@link SnapshotIndex}es for the provided {@code task} * in the provided {@code checkpoint}. * @param jobName job name is used to build request metadata * @param jobId job id is used to build request metadata * @param taskName task name to get the store state checkpoint markers and snapshot indexes for * @param checkpoint {@link Checkpoint} instance to get the store state checkpoint markers from. Only * {@link CheckpointV2} and newer are supported for blob stores. * @param storesToBackupOrRestore set of store names to be backed up or restored * @param getDeleted tries gets a deleted but not yet compacted SnapshotIndex from the blob store. * @return Map of store name to its blob id of snapshot indices and their corresponding snapshot indices for the task. 
*/ public Map<String, Pair<String, SnapshotIndex>> getStoreSnapshotIndexes(String jobName, String jobId, String taskName, Checkpoint checkpoint, Set<String> storesToBackupOrRestore, boolean getDeleted) { //TODO MED shesharma document error handling (checkpoint ver, blob not found, getBlob) if (checkpoint == null) { LOG.debug("No previous checkpoint found for taskName: {}", taskName); return ImmutableMap.of(); } if (checkpoint.getVersion() == 1) { LOG.warn("Checkpoint version 1 is not supported for blob store backup and restore."); return ImmutableMap.of(); } Map<String, CompletableFuture<Pair<String, SnapshotIndex>>> storeSnapshotIndexFutures = new HashMap<>(); CheckpointV2 checkpointV2 = (CheckpointV2) checkpoint; Map<String, Map<String, String>> factoryToStoreSCMs = checkpointV2.getStateCheckpointMarkers(); Map<String, String> storeSnapshotIndexBlobIds = factoryToStoreSCMs.get(BlobStoreStateBackendFactory.class.getName()); if (storeSnapshotIndexBlobIds != null) { storeSnapshotIndexBlobIds.forEach((storeName, snapshotIndexBlobId) -> { if (storesToBackupOrRestore.contains(storeName)) { try { LOG.debug("Getting snapshot index for taskName: {} store: {} blobId: {} with getDeleted set to {}", taskName, storeName, snapshotIndexBlobId, getDeleted); Metadata requestMetadata = new Metadata(Metadata.SNAPSHOT_INDEX_PAYLOAD_PATH, Optional.empty(), jobName, jobId, taskName, storeName); CompletableFuture<SnapshotIndex> snapshotIndexFuture = getSnapshotIndex(snapshotIndexBlobId, requestMetadata, getDeleted).toCompletableFuture(); Pair<CompletableFuture<String>, CompletableFuture<SnapshotIndex>> pairOfFutures = Pair.of(CompletableFuture.completedFuture(snapshotIndexBlobId), snapshotIndexFuture); // save the future and block once in the end instead of blocking for each request. 
storeSnapshotIndexFutures.put(storeName, FutureUtil.toFutureOfPair(pairOfFutures)); } catch (Exception e) { throw new SamzaException( String.format("Error getting SnapshotIndex for blobId: %s for taskName: %s store: %s", snapshotIndexBlobId, taskName, storeName), e); } } else { LOG.debug("SnapshotIndex blob id {} for store {} is not present in the set of stores to be backed up/restores: {}", snapshotIndexBlobId, storeName, storesToBackupOrRestore); } }); } else { LOG.debug("No store SCMs found for blob store state backend in for taskName: {} in checkpoint {}", taskName, checkpointV2.getCheckpointId()); } try { return FutureUtil.toFutureOfMap(storeSnapshotIndexFutures).join(); } catch (Exception e) { throw new SamzaException( String.format("Error while waiting to get store snapshot indexes for task %s", taskName), e); } } /** * GETs the {@link SnapshotIndex} from the blob store. * @param blobId blob ID of the {@link SnapshotIndex} to get * @return a Future containing the {@link SnapshotIndex} */ public CompletableFuture<SnapshotIndex> getSnapshotIndex(String blobId, Metadata metadata, boolean getDeleted) { Preconditions.checkState(StringUtils.isNotBlank(blobId)); String opName = "getSnapshotIndex: " + blobId; return FutureUtil.executeAsyncWithRetries(opName, () -> { ByteArrayOutputStream indexBlobStream = new ByteArrayOutputStream(); // no need to close ByteArrayOutputStream return blobStoreManager.get(blobId, indexBlobStream, metadata, getDeleted).toCompletableFuture() .thenApplyAsync(f -> snapshotIndexSerde.fromBytes(indexBlobStream.toByteArray()), executor) .handle((snapshotIndex, ex) -> { if (ex != null) { throw new SamzaException(String.format("Unable to get SnapshotIndex blob. The blob ID is : %s", blobId), ex); } return snapshotIndex; }); }, isCauseNonRetriable(), executor, retryPolicyConfig); } /** * PUTs the {@link SnapshotIndex} to the blob store. * @param snapshotIndex SnapshotIndex to put. 
* @return a Future containing the blob ID of the {@link SnapshotIndex}. */ public CompletableFuture<String> putSnapshotIndex(SnapshotIndex snapshotIndex) { byte[] bytes = snapshotIndexSerde.toBytes(snapshotIndex); String opName = "putSnapshotIndex for checkpointId: " + snapshotIndex.getSnapshotMetadata().getCheckpointId(); return FutureUtil.executeAsyncWithRetries(opName, () -> { InputStream inputStream = new ByteArrayInputStream(bytes); // no need to close ByteArrayInputStream SnapshotMetadata snapshotMetadata = snapshotIndex.getSnapshotMetadata(); Metadata metadata = new Metadata(Metadata.SNAPSHOT_INDEX_PAYLOAD_PATH, Optional.of((long) bytes.length), snapshotMetadata.getJobName(), snapshotMetadata.getJobId(), snapshotMetadata.getTaskName(), snapshotMetadata.getStoreName()); return blobStoreManager.put(inputStream, metadata).toCompletableFuture(); }, isCauseNonRetriable(), executor, retryPolicyConfig); } /** * Gets SnapshotIndex blob, cleans up a SnapshotIndex by recursively deleting all blobs associated with files/subdirs * inside the SnapshotIndex and finally deletes SnapshotIndex blob itself. * @param snapshotIndexBlobId Blob id of SnapshotIndex * @param requestMetadata Metadata of the request * @param getDeleted Determines whether to try to get deleted SnapshotIndex or not. 
 */
public CompletionStage<Void> cleanSnapshotIndex(String snapshotIndexBlobId, Metadata requestMetadata, boolean getDeleted) {
  // Re-request with the snapshot-index payload path; the incoming metadata may describe a store payload.
  Metadata getSnapshotRequest = new Metadata(Metadata.SNAPSHOT_INDEX_PAYLOAD_PATH, Optional.empty(),
      requestMetadata.getJobName(), requestMetadata.getJobId(), requestMetadata.getTaskName(),
      requestMetadata.getStoreName());
  return getSnapshotIndex(snapshotIndexBlobId, getSnapshotRequest, getDeleted)
      .thenCompose(snapshotIndex -> cleanSnapshotIndex(snapshotIndexBlobId, snapshotIndex, requestMetadata));
}

/**
 * Cleans up a SnapshotIndex by recursively deleting all blobs associated with files/subdirs inside the SnapshotIndex
 * and finally deletes SnapshotIndex blob itself.
 * @param snapshotIndexBlobId Blob id of SnapshotIndex
 * @param snapshotIndex SnapshotIndex to delete
 * @param requestMetadata Metadata of the request
 */
public CompletionStage<Void> cleanSnapshotIndex(String snapshotIndexBlobId, SnapshotIndex snapshotIndex,
    Metadata requestMetadata) {
  DirIndex dirIndex = snapshotIndex.getDirIndex();
  CompletionStage<Void> storeDeletionFuture =
      cleanUpDir(dirIndex, requestMetadata) // delete files and sub-dirs previously marked for removal
          .thenComposeAsync(v -> deleteDir(dirIndex, requestMetadata), executor) // deleted files and dirs still present
          .thenComposeAsync(v -> deleteSnapshotIndexBlob(snapshotIndexBlobId, requestMetadata), executor) // delete the snapshot index blob
          .exceptionally(ex -> {
            Throwable unwrappedException = FutureUtil.unwrapExceptions(CompletionException.class,
                FutureUtil.unwrapExceptions(SamzaException.class, ex));
            // If a blob is already deleted, do not fail -> this may happen if after we restore a
            // deleted checkpoint and then try to clean up old checkpoint.
            if (unwrappedException instanceof DeletedException) {
              LOG.warn("Request {} received DeletedException on trying to clean up SnapshotIndex {}. Ignoring the error.",
                  requestMetadata, snapshotIndexBlobId);
              return null;
            }
            String msg = String.format("Request %s received error deleting/cleaning up SnapshotIndex: %s",
                requestMetadata, snapshotIndexBlobId);
            throw new SamzaException(msg, ex);
          });
  return storeDeletionFuture;
}

/**
 * WARNING: This method deletes the **SnapshotIndex blob** from the snapshot. This should only be called to clean
 * up an older snapshot **AFTER** all the files and sub-dirs to be deleted from this snapshot are already deleted
 * using {@link #cleanUpDir(DirIndex, Metadata)}
 *
 * @param snapshotIndexBlobId blob ID of SnapshotIndex blob to delete
 * @param metadata {@link Metadata} related to the request
 * @return a future that completes when the index blob is deleted from remote store.
 */
public CompletionStage<Void> deleteSnapshotIndexBlob(String snapshotIndexBlobId, Metadata metadata) {
  Preconditions.checkState(StringUtils.isNotBlank(snapshotIndexBlobId));
  LOG.debug("Deleting SnapshotIndex blob: {} from blob store", snapshotIndexBlobId);
  String opName = "deleteSnapshotIndexBlob: " + snapshotIndexBlobId;
  return FutureUtil.executeAsyncWithRetries(opName,
      () -> blobStoreManager.delete(snapshotIndexBlobId, metadata).toCompletableFuture(),
      isCauseNonRetriable(), executor, retryPolicyConfig);
}

/**
 * Non-blocking restore of a {@link SnapshotIndex} to local store by downloading all the files and sub-dirs associated
 * with this remote snapshot.
 * NOTE: getDeleted flag sets if it reattempts to get a deleted file by setting getDeleted flag in getFiles.
 * @param baseDir local directory to restore into; created (with parents) if absent
 * @param dirIndex remote directory index describing the files/sub-dirs to download
 * @param metadata request {@link Metadata}; per-file metadata is derived from it
 * @param getDeleted whether to attempt fetching blobs already marked deleted
 * @return A future that completes when all the async downloads completes
 */
public CompletableFuture<Void> restoreDir(File baseDir, DirIndex dirIndex, Metadata metadata, boolean getDeleted) {
  LOG.debug("Restoring contents of directory: {} from remote snapshot. GetDeletedFiles set to: {}", baseDir, getDeleted);

  List<CompletableFuture<Void>> downloadFutures = new ArrayList<>();
  try {
    // create parent directories if they don't exist
    Files.createDirectories(baseDir.toPath());
  } catch (IOException exception) {
    LOG.error("Error creating directory: {} for restore", baseDir.getAbsolutePath(), exception);
    throw new SamzaException(String.format("Error creating directory: %s for restore", baseDir.getAbsolutePath()),
        exception);
  }

  // restore all files in the directory
  for (FileIndex fileIndex : dirIndex.getFilesPresent()) {
    File fileToRestore = Paths.get(baseDir.getAbsolutePath(), fileIndex.getFileName()).toFile();
    Metadata requestMetadata =
        new Metadata(fileToRestore.getAbsolutePath(), Optional.of(fileIndex.getFileMetadata().getSize()),
            metadata.getJobName(), metadata.getJobId(), metadata.getTaskName(), metadata.getStoreName());
    List<FileBlob> fileBlobs = fileIndex.getBlobs();

    String opName = "restoreFile: " + fileToRestore.getAbsolutePath();
    CompletableFuture<Void> fileRestoreFuture = FutureUtil.executeAsyncWithRetries(opName,
        () -> getFile(fileBlobs, fileToRestore, requestMetadata, getDeleted), isCauseNonRetriable(), executor,
        retryPolicyConfig);
    downloadFutures.add(fileRestoreFuture);
  }

  // restore any sub-directories
  List<DirIndex> subDirs = dirIndex.getSubDirsPresent();
  for (DirIndex subDir : subDirs) {
    File subDirFile = Paths.get(baseDir.getAbsolutePath(), subDir.getDirName()).toFile();
    downloadFutures.add(restoreDir(subDirFile, subDir, metadata, getDeleted));
  }

  return FutureUtil.allOf(downloadFutures);
}

/**
 * Recursively upload all new files and upload or update contents of all subdirs in the {@link DirDiff} and return a
 * Future containing the {@link DirIndex} associated with the directory.
 * @param dirDiff diff for the contents of this directory
 * @return A future with the {@link DirIndex} if the upload completed successfully.
 */
public CompletionStage<DirIndex> putDir(DirDiff dirDiff, SnapshotMetadata snapshotMetadata) {
  // Upload all new files in the dir
  List<File> filesToUpload = dirDiff.getFilesAdded();
  List<CompletionStage<FileIndex>> fileFutures = filesToUpload.stream()
      .map(file -> putFile(file, snapshotMetadata))
      .collect(Collectors.toList());

  CompletableFuture<Void> allFilesFuture =
      CompletableFuture.allOf(fileFutures.toArray(new CompletableFuture[0]));

  List<CompletionStage<DirIndex>> subDirFutures = new ArrayList<>();
  // recursively upload all new subdirs of this dir
  for (DirDiff subDirAdded: dirDiff.getSubDirsAdded()) {
    subDirFutures.add(putDir(subDirAdded, snapshotMetadata));
  }
  // recursively update contents of all subdirs that are retained but might have been modified
  for (DirDiff subDirRetained: dirDiff.getSubDirsRetained()) {
    subDirFutures.add(putDir(subDirRetained, snapshotMetadata));
  }
  CompletableFuture<Void> allDirBlobsFuture =
      CompletableFuture.allOf(subDirFutures.toArray(new CompletableFuture[0]));

  return CompletableFuture.allOf(allDirBlobsFuture, allFilesFuture)
      .thenApplyAsync(f -> {
        LOG.trace("All file and dir uploads complete for task: {} store: {}",
            snapshotMetadata.getTaskName(), snapshotMetadata.getStoreName());
        // All futures are complete at this point (guarded by allOf above), so join() does not block.
        List<FileIndex> filesPresent = fileFutures.stream()
            .map(blob -> blob.toCompletableFuture().join())
            .collect(Collectors.toList());
        filesPresent.addAll(dirDiff.getFilesRetained());

        List<DirIndex> subDirsPresent = subDirFutures.stream()
            .map(subDir -> subDir.toCompletableFuture().join())
            .collect(Collectors.toList());

        LOG.debug("Uploaded diff for task: {} store: {} with statistics: {}",
            snapshotMetadata.getTaskName(), snapshotMetadata.getStoreName(), DirDiff.getStats(dirDiff));
        LOG.trace("Returning new DirIndex for task: {} store: {}",
            snapshotMetadata.getTaskName(), snapshotMetadata.getStoreName());
        return new DirIndex(dirDiff.getDirName(), filesPresent, dirDiff.getFilesRemoved(), subDirsPresent,
            dirDiff.getSubDirsRemoved());
      }, executor);
}

/**
 * WARNING: Recursively delete **ALL** the associated files and subdirs within the provided {@link DirIndex}.
 * @param dirIndex {@link DirIndex} whose entire contents are to be deleted.
 * @param metadata {@link Metadata} related to the request
 * @return a future that completes when ALL the files and subdirs associated with the dirIndex have been
 *         marked for deletion in the remote blob store.
 */
public CompletionStage<Void> deleteDir(DirIndex dirIndex, Metadata metadata) {
  LOG.debug("Completely deleting dir: {} in blob store", dirIndex.getDirName());
  List<CompletionStage<Void>> deleteFutures = new ArrayList<>();
  // Delete all files present in subDir
  for (FileIndex file: dirIndex.getFilesPresent()) {
    Metadata requestMetadata = new Metadata(file.getFileName(), Optional.of(file.getFileMetadata().getSize()),
        metadata.getJobName(), metadata.getJobId(), metadata.getTaskName(), metadata.getStoreName());
    deleteFutures.add(deleteFile(file, requestMetadata));
  }
  // Delete all subDirs present recursively
  for (DirIndex subDir: dirIndex.getSubDirsPresent()) {
    deleteFutures.add(deleteDir(subDir, metadata));
  }
  return CompletableFuture.allOf(deleteFutures.toArray(new CompletableFuture[0]));
}

/**
 * Recursively issue delete requests for files and dirs marked to be removed in a previously created remote snapshot.
 * Note: We do not immediately delete files/dirs to be removed when uploading a snapshot to the remote
 * store. We just track them for deletion during the upload, and delete them AFTER the snapshot is uploaded, and the
 * blob IDs have been persisted as part of the checkpoint. This is to prevent data loss if a failure happens
 * part way through the commit. We issue delete these file/subdirs in cleanUp() phase of commit lifecycle.
 * @param dirIndex the dir in the remote snapshot to clean up.
 * @param metadata Metadata related to the request
 * @return a future that completes when all the files and subdirs marked for deletion are cleaned up.
 */
public CompletionStage<Void> cleanUpDir(DirIndex dirIndex, Metadata metadata) {
  String dirName = dirIndex.getDirName();
  if (DirIndex.ROOT_DIR_NAME.equals(dirName)) {
    LOG.debug("Cleaning up root dir in blob store.");
  } else {
    LOG.debug("Cleaning up dir: {} in blob store.", dirIndex.getDirName());
  }

  List<CompletionStage<Void>> cleanUpFuture = new ArrayList<>();
  List<FileIndex> files = dirIndex.getFilesRemoved();
  for (FileIndex file: files) {
    Metadata requestMetadata = new Metadata(file.getFileName(), Optional.of(file.getFileMetadata().getSize()),
        metadata.getJobName(), metadata.getJobId(), metadata.getTaskName(), metadata.getStoreName());
    cleanUpFuture.add(deleteFile(file, requestMetadata));
  }

  for (DirIndex subDirToDelete : dirIndex.getSubDirsRemoved()) {
    // recursively delete ALL contents of the subDirToDelete.
    cleanUpFuture.add(deleteDir(subDirToDelete, metadata));
  }

  for (DirIndex subDirToRetain : dirIndex.getSubDirsPresent()) {
    // recursively clean up the subDir, only deleting files and subdirs marked for deletion.
    cleanUpFuture.add(cleanUpDir(subDirToRetain, metadata));
  }

  return CompletableFuture.allOf(cleanUpFuture.toArray(new CompletableFuture[0]));
}

/**
 * Gets a file from the blob store.
 * @param fileBlobs List of {@link FileBlob}s that constitute this file.
 * @param fileToRestore File pointing to the local path where the file will be restored.
 * @param requestMetadata {@link Metadata} associated with this request
 * @param getDeleted Flag that indicates whether to try to get Deleted (but not yet compacted) files.
 * @return a future that completes when the file is downloaded and written or if an exception occurs.
 */
@VisibleForTesting
CompletableFuture<Void> getFile(List<FileBlob> fileBlobs, File fileToRestore, Metadata requestMetadata,
    boolean getDeleted) {
  FileOutputStream outputStream = null;
  try {
    long restoreFileStartTime = System.nanoTime();
    if (fileToRestore.exists()) {
      // delete the file if it already exists, e.g. from a previous retry.
      Files.delete(fileToRestore.toPath());
    }

    outputStream = new FileOutputStream(fileToRestore);
    final FileOutputStream finalOutputStream = outputStream;
    // TODO HIGH shesharm add integration tests to ensure empty files and directories are handled correctly E2E.
    // NOTE(review): return value of createNewFile() is ignored — presumably fine since FileOutputStream
    // above already created the file; confirm the 0-byte-file intent described below.
    fileToRestore.createNewFile(); // create file for 0 byte files (fileIndex entry but no fileBlobs).
    // create a copy to ensure list being sorted is mutable.
    List<FileBlob> fileBlobsCopy = new ArrayList<>(fileBlobs);
    fileBlobsCopy.sort(Comparator.comparingInt(FileBlob::getOffset)); // sort by offset.
    // chain the futures such that write to file for blobs is sequential.
    // can be optimized to write concurrently to the file later.
    CompletableFuture<Void> resultFuture = CompletableFuture.completedFuture(null);
    for (FileBlob fileBlob : fileBlobsCopy) {
      resultFuture = resultFuture.thenComposeAsync(v -> {
        LOG.debug("Starting restore for file: {} with blob id: {} at offset: {} with getDeleted set to: {}",
            fileToRestore, fileBlob.getBlobId(), fileBlob.getOffset(), getDeleted);
        return blobStoreManager.get(fileBlob.getBlobId(), finalOutputStream, requestMetadata, getDeleted);
      }, executor);
    }

    resultFuture = resultFuture.thenRunAsync(() -> {
      LOG.debug("Finished restore for file: {}. Closing output stream.", fileToRestore);
      try {
        // flush the file contents to disk
        finalOutputStream.getFD().sync();
        finalOutputStream.close();
      } catch (Exception e) {
        throw new SamzaException(String.format("Error closing output stream for file: %s",
            fileToRestore.getAbsolutePath()), e);
      }
    }, executor);

    // Metrics are updated as a side observer; the returned future is the chain above, not this whenComplete stage.
    resultFuture.whenComplete((res, ex) -> {
      if (restoreMetrics != null) {
        restoreMetrics.avgFileRestoreNs.update(System.nanoTime() - restoreFileStartTime);
        long fileSize = requestMetadata.getPayloadSize();
        restoreMetrics.restoreRate.inc(fileSize);
        restoreMetrics.filesRestored.getValue().addAndGet(1);
        restoreMetrics.bytesRestored.getValue().addAndGet(fileSize);
        restoreMetrics.filesRemaining.getValue().addAndGet(-1);
        restoreMetrics.bytesRemaining.getValue().addAndGet(-1 * fileSize);
      }
    });
    return resultFuture;
  } catch (Exception exception) {
    try {
      if (outputStream != null) {
        outputStream.close();
      }
    } catch (Exception err) {
      LOG.error("Error closing output stream for file: {}", fileToRestore.getAbsolutePath(), err);
    }
    throw new SamzaException(String.format("Error restoring file: %s in path: %s",
        fileToRestore.getName(), requestMetadata.getPayloadPath()), exception);
  }
}

/**
 * Upload a File to blob store.
 * @param file File to upload to blob store.
 * @param snapshotMetadata snapshot metadata providing job/task/store identifiers for the request
 * @return A future containing the {@link FileIndex} for the uploaded file.
 */
@VisibleForTesting
public CompletableFuture<FileIndex> putFile(File file, SnapshotMetadata snapshotMetadata) {
  if (file == null || !file.isFile()) {
    String message = file != null ? "Dir or Symbolic link" : "null";
    throw new SamzaException(String.format("Required a non-null parameter of type file, provided: %s", message));
  }
  long putFileStartTime = System.nanoTime();

  String opName = "putFile: " + file.getAbsolutePath();
  Supplier<CompletionStage<FileIndex>> fileUploadAction = () -> {
    LOG.debug("Putting file: {} to blob store.", file.getPath());
    CompletableFuture<FileIndex> fileBlobFuture;
    CheckedInputStream inputStream = null;
    try {
      // TODO HIGH shesharm maybe use the more efficient CRC32C / PureJavaCRC32 impl
      inputStream = new CheckedInputStream(new FileInputStream(file), new CRC32());
      CheckedInputStream finalInputStream = inputStream;
      FileMetadata fileMetadata = FileMetadata.fromFile(file);

      if (backupMetrics != null) {
        backupMetrics.avgFileSizeBytes.update(fileMetadata.getSize());
      }

      Metadata metadata = new Metadata(file.getAbsolutePath(), Optional.of(fileMetadata.getSize()),
          snapshotMetadata.getJobName(), snapshotMetadata.getJobId(), snapshotMetadata.getTaskName(),
          snapshotMetadata.getStoreName());

      fileBlobFuture = blobStoreManager.put(inputStream, metadata)
          .thenApplyAsync(id -> {
            LOG.trace("Put complete. Received Blob ID {}. Closing input stream for file: {}.", id, file.getPath());
            try {
              finalInputStream.close();
            } catch (Exception e) {
              throw new SamzaException(String.format("Error closing input stream for file: %s",
                  file.getAbsolutePath()), e);
            }
            LOG.trace("Returning new FileIndex for file: {}.", file.getPath());
            // Checksum is read after the stream is fully consumed by the put, so it covers the whole file.
            return new FileIndex(
                file.getName(),
                Collections.singletonList(new FileBlob(id, 0)),
                fileMetadata,
                finalInputStream.getChecksum().getValue());
          }, executor).toCompletableFuture();
    } catch (Exception e) {
      try {
        if (inputStream != null) {
          inputStream.close();
        }
      } catch (Exception err) {
        LOG.error("Error closing input stream for file: {}", file.getName(), err);
      }
      LOG.error("Error putting file: {}", file.getName(), e);
      throw new SamzaException(String.format("Error putting file %s", file.getAbsolutePath()), e);
    }
    return fileBlobFuture;
  };

  return FutureUtil.executeAsyncWithRetries(opName, fileUploadAction, isCauseNonRetriable(), executor,
      retryPolicyConfig)
      .whenComplete((res, ex) -> {
        if (backupMetrics != null) {
          backupMetrics.avgFileUploadNs.update(System.nanoTime() - putFileStartTime);
          long fileSize = file.length();
          backupMetrics.uploadRate.inc(fileSize);
          backupMetrics.filesUploaded.getValue().addAndGet(1);
          backupMetrics.bytesUploaded.getValue().addAndGet(fileSize);
          backupMetrics.filesRemaining.getValue().addAndGet(-1);
          backupMetrics.bytesRemaining.getValue().addAndGet(-1 * fileSize);
        }
      });
}

/**
 * Delete a {@link FileIndex} from the remote store by deleting all {@link FileBlob}s associated with it.
 * @param fileIndex FileIndex of the file to delete from the remote store.
 * @param metadata {@link Metadata} related to the request
 * @return a future that completes when the FileIndex has been marked for deletion in the remote blob store.
*/ private CompletionStage<Void> deleteFile(FileIndex fileIndex, Metadata metadata) { List<CompletionStage<Void>> deleteFutures = new ArrayList<>(); List<FileBlob> fileBlobs = fileIndex.getBlobs(); for (FileBlob fileBlob : fileBlobs) { LOG.debug("Deleting file: {} blobId: {} from blob store.", fileIndex.getFileName(), fileBlob.getBlobId()); String opName = "deleteFile: " + fileIndex.getFileName() + " blobId: " + fileBlob.getBlobId(); Supplier<CompletionStage<Void>> fileDeletionAction = () -> blobStoreManager.delete(fileBlob.getBlobId(), metadata).toCompletableFuture(); CompletableFuture<Void> fileDeletionFuture = FutureUtil.executeAsyncWithRetries(opName, fileDeletionAction, isCauseNonRetriable(), executor, retryPolicyConfig); deleteFutures.add(fileDeletionFuture); } return CompletableFuture.allOf(deleteFutures.toArray(new CompletableFuture[0])); } /** * Get the {@link SnapshotIndex} using the blob id and marks all the blobs associated with it to never expire, * including the SnapshotIndex itself. * @param indexBlobId Blob id of {@link SnapshotIndex} * @param metadata {@link Metadata} related to the request * @return A future that completes when all the files and subdirs associated with this remote snapshot, as well as * the {@link SnapshotIndex} associated with the snapshot are marked to never expire. */ public CompletableFuture<Void> removeTTLForSnapshotIndex(String indexBlobId, Metadata metadata) { return getSnapshotIndex(indexBlobId, metadata, false) .thenCompose(snapshotIndex -> removeTTL(indexBlobId, snapshotIndex, metadata)); } /** * Marks all the blobs associated with an {@link SnapshotIndex} to never expire, including the SnapshotIndex * @param snapshotIndex {@link SnapshotIndex} of the remote snapshot * @param metadata {@link Metadata} related to the request * @return A future that completes when all the files and subdirs associated with this remote snapshot are marked to * never expire. 
*/ public CompletionStage<Void> removeTTL(String indexBlobId, SnapshotIndex snapshotIndex, Metadata metadata) { SnapshotMetadata snapshotMetadata = snapshotIndex.getSnapshotMetadata(); LOG.debug("Marking contents of SnapshotIndex: {} to never expire", snapshotMetadata.toString()); String opName = "removeTTL for SnapshotIndex for checkpointId: " + snapshotMetadata.getCheckpointId(); Supplier<CompletionStage<Void>> removeDirIndexTTLAction = () -> removeTTL(snapshotIndex.getDirIndex(), metadata).toCompletableFuture(); CompletableFuture<Void> dirIndexTTLRemovalFuture = FutureUtil.executeAsyncWithRetries(opName, removeDirIndexTTLAction, isCauseNonRetriable(), executor, retryPolicyConfig); return dirIndexTTLRemovalFuture.thenComposeAsync(aVoid -> { String op2Name = "removeTTL for indexBlobId: " + indexBlobId; Supplier<CompletionStage<Void>> removeIndexBlobTTLAction = () -> blobStoreManager.removeTTL(indexBlobId, metadata).toCompletableFuture(); return FutureUtil.executeAsyncWithRetries(op2Name, removeIndexBlobTTLAction, isCauseNonRetriable(), executor, retryPolicyConfig); }, executor); } /** * Recursively mark all the blobs associated with the {@link DirIndex} to never expire (remove TTL). * @param dirIndex the {@link DirIndex} whose contents' TTL needs to be removed * @param metadata {@link Metadata} related to the request * @return A future that completes when all the blobs associated with this dirIndex are marked to * never expire. 
*/ private CompletableFuture<Void> removeTTL(DirIndex dirIndex, Metadata metadata) { String dirName = dirIndex.getDirName(); if (DirIndex.ROOT_DIR_NAME.equals(dirName)) { LOG.debug("Removing TTL for files and dirs present in DirIndex for root dir."); } else { LOG.debug("Removing TTL for files and dirs present in DirIndex for dir: {}", dirName); } List<CompletableFuture<Void>> updateTTLsFuture = new ArrayList<>(); for (DirIndex subDir: dirIndex.getSubDirsPresent()) { updateTTLsFuture.add(removeTTL(subDir, metadata)); } for (FileIndex file: dirIndex.getFilesPresent()) { Metadata requestMetadata = new Metadata(file.getFileName(), Optional.of(file.getFileMetadata().getSize()), metadata.getJobName(), metadata.getJobId(), metadata.getTaskName(), metadata.getStoreName()); List<FileBlob> fileBlobs = file.getBlobs(); for (FileBlob fileBlob : fileBlobs) { String opname = "removeTTL for fileBlob: " + file.getFileName() + " with blobId: {}" + fileBlob.getBlobId(); Supplier<CompletionStage<Void>> ttlRemovalAction = () -> blobStoreManager.removeTTL(fileBlob.getBlobId(), requestMetadata).toCompletableFuture(); CompletableFuture<Void> ttlRemovalFuture = FutureUtil.executeAsyncWithRetries(opname, ttlRemovalAction, isCauseNonRetriable(), executor, retryPolicyConfig); updateTTLsFuture.add(ttlRemovalFuture); } } return CompletableFuture.allOf(updateTTLsFuture.toArray(new CompletableFuture[0])); } private static Predicate<Throwable> isCauseNonRetriable() { return throwable -> { Throwable unwrapped = FutureUtil.unwrapExceptions(CompletionException.class, throwable); return unwrapped != null && !RetriableException.class.isAssignableFrom(unwrapped.getClass()); }; } }
google/schemaorg-java
35,583
src/main/java/com/google/schemaorg/core/SoftwareApplication.java
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableList; import com.google.schemaorg.JsonLdContext; import com.google.schemaorg.SchemaOrgType; import com.google.schemaorg.core.datatype.Date; import com.google.schemaorg.core.datatype.DateTime; import com.google.schemaorg.core.datatype.Integer; import com.google.schemaorg.core.datatype.Number; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.PopularityScoreSpecification; import javax.annotation.Nullable; /** * Interface of <a * href="http://schema.org/SoftwareApplication}">http://schema.org/SoftwareApplication}</a>. */ public interface SoftwareApplication extends CreativeWork { /** * Builder interface of <a * href="http://schema.org/SoftwareApplication}">http://schema.org/SoftwareApplication}</a>. */ public interface Builder extends CreativeWork.Builder { @Override Builder addJsonLdContext(@Nullable JsonLdContext context); @Override Builder addJsonLdContext(@Nullable JsonLdContext.Builder context); @Override Builder setJsonLdId(@Nullable String value); @Override Builder setJsonLdReverse(String property, Thing obj); @Override Builder setJsonLdReverse(String property, Thing.Builder builder); /** Add a value to property about. */ Builder addAbout(Thing value); /** Add a value to property about. 
*/ Builder addAbout(Thing.Builder value); /** Add a value to property about. */ Builder addAbout(String value); /** Add a value to property accessibilityAPI. */ Builder addAccessibilityAPI(Text value); /** Add a value to property accessibilityAPI. */ Builder addAccessibilityAPI(String value); /** Add a value to property accessibilityControl. */ Builder addAccessibilityControl(Text value); /** Add a value to property accessibilityControl. */ Builder addAccessibilityControl(String value); /** Add a value to property accessibilityFeature. */ Builder addAccessibilityFeature(Text value); /** Add a value to property accessibilityFeature. */ Builder addAccessibilityFeature(String value); /** Add a value to property accessibilityHazard. */ Builder addAccessibilityHazard(Text value); /** Add a value to property accessibilityHazard. */ Builder addAccessibilityHazard(String value); /** Add a value to property accountablePerson. */ Builder addAccountablePerson(Person value); /** Add a value to property accountablePerson. */ Builder addAccountablePerson(Person.Builder value); /** Add a value to property accountablePerson. */ Builder addAccountablePerson(String value); /** Add a value to property additionalType. */ Builder addAdditionalType(URL value); /** Add a value to property additionalType. */ Builder addAdditionalType(String value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating.Builder value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(String value); /** Add a value to property alternateName. */ Builder addAlternateName(Text value); /** Add a value to property alternateName. */ Builder addAlternateName(String value); /** Add a value to property alternativeHeadline. */ Builder addAlternativeHeadline(Text value); /** Add a value to property alternativeHeadline. 
*/ Builder addAlternativeHeadline(String value); /** Add a value to property applicationCategory. */ Builder addApplicationCategory(Text value); /** Add a value to property applicationCategory. */ Builder addApplicationCategory(URL value); /** Add a value to property applicationCategory. */ Builder addApplicationCategory(String value); /** Add a value to property applicationSubCategory. */ Builder addApplicationSubCategory(Text value); /** Add a value to property applicationSubCategory. */ Builder addApplicationSubCategory(URL value); /** Add a value to property applicationSubCategory. */ Builder addApplicationSubCategory(String value); /** Add a value to property applicationSuite. */ Builder addApplicationSuite(Text value); /** Add a value to property applicationSuite. */ Builder addApplicationSuite(String value); /** Add a value to property associatedMedia. */ Builder addAssociatedMedia(MediaObject value); /** Add a value to property associatedMedia. */ Builder addAssociatedMedia(MediaObject.Builder value); /** Add a value to property associatedMedia. */ Builder addAssociatedMedia(String value); /** Add a value to property audience. */ Builder addAudience(Audience value); /** Add a value to property audience. */ Builder addAudience(Audience.Builder value); /** Add a value to property audience. */ Builder addAudience(String value); /** Add a value to property audio. */ Builder addAudio(AudioObject value); /** Add a value to property audio. */ Builder addAudio(AudioObject.Builder value); /** Add a value to property audio. */ Builder addAudio(String value); /** Add a value to property author. */ Builder addAuthor(Organization value); /** Add a value to property author. */ Builder addAuthor(Organization.Builder value); /** Add a value to property author. */ Builder addAuthor(Person value); /** Add a value to property author. */ Builder addAuthor(Person.Builder value); /** Add a value to property author. 
*/ Builder addAuthor(String value); /** Add a value to property availableOnDevice. */ Builder addAvailableOnDevice(Text value); /** Add a value to property availableOnDevice. */ Builder addAvailableOnDevice(String value); /** Add a value to property award. */ Builder addAward(Text value); /** Add a value to property award. */ Builder addAward(String value); /** Add a value to property awards. */ Builder addAwards(Text value); /** Add a value to property awards. */ Builder addAwards(String value); /** Add a value to property character. */ Builder addCharacter(Person value); /** Add a value to property character. */ Builder addCharacter(Person.Builder value); /** Add a value to property character. */ Builder addCharacter(String value); /** Add a value to property citation. */ Builder addCitation(CreativeWork value); /** Add a value to property citation. */ Builder addCitation(CreativeWork.Builder value); /** Add a value to property citation. */ Builder addCitation(Text value); /** Add a value to property citation. */ Builder addCitation(String value); /** Add a value to property comment. */ Builder addComment(Comment value); /** Add a value to property comment. */ Builder addComment(Comment.Builder value); /** Add a value to property comment. */ Builder addComment(String value); /** Add a value to property commentCount. */ Builder addCommentCount(Integer value); /** Add a value to property commentCount. */ Builder addCommentCount(String value); /** Add a value to property contentLocation. */ Builder addContentLocation(Place value); /** Add a value to property contentLocation. */ Builder addContentLocation(Place.Builder value); /** Add a value to property contentLocation. */ Builder addContentLocation(String value); /** Add a value to property contentRating. */ Builder addContentRating(Text value); /** Add a value to property contentRating. */ Builder addContentRating(String value); /** Add a value to property contributor. 
*/ Builder addContributor(Organization value); /** Add a value to property contributor. */ Builder addContributor(Organization.Builder value); /** Add a value to property contributor. */ Builder addContributor(Person value); /** Add a value to property contributor. */ Builder addContributor(Person.Builder value); /** Add a value to property contributor. */ Builder addContributor(String value); /** Add a value to property copyrightHolder. */ Builder addCopyrightHolder(Organization value); /** Add a value to property copyrightHolder. */ Builder addCopyrightHolder(Organization.Builder value); /** Add a value to property copyrightHolder. */ Builder addCopyrightHolder(Person value); /** Add a value to property copyrightHolder. */ Builder addCopyrightHolder(Person.Builder value); /** Add a value to property copyrightHolder. */ Builder addCopyrightHolder(String value); /** Add a value to property copyrightYear. */ Builder addCopyrightYear(Number value); /** Add a value to property copyrightYear. */ Builder addCopyrightYear(String value); /** Add a value to property countriesNotSupported. */ Builder addCountriesNotSupported(Text value); /** Add a value to property countriesNotSupported. */ Builder addCountriesNotSupported(String value); /** Add a value to property countriesSupported. */ Builder addCountriesSupported(Text value); /** Add a value to property countriesSupported. */ Builder addCountriesSupported(String value); /** Add a value to property creator. */ Builder addCreator(Organization value); /** Add a value to property creator. */ Builder addCreator(Organization.Builder value); /** Add a value to property creator. */ Builder addCreator(Person value); /** Add a value to property creator. */ Builder addCreator(Person.Builder value); /** Add a value to property creator. */ Builder addCreator(String value); /** Add a value to property dateCreated. */ Builder addDateCreated(Date value); /** Add a value to property dateCreated. 
*/ Builder addDateCreated(DateTime value); /** Add a value to property dateCreated. */ Builder addDateCreated(String value); /** Add a value to property dateModified. */ Builder addDateModified(Date value); /** Add a value to property dateModified. */ Builder addDateModified(DateTime value); /** Add a value to property dateModified. */ Builder addDateModified(String value); /** Add a value to property datePublished. */ Builder addDatePublished(Date value); /** Add a value to property datePublished. */ Builder addDatePublished(String value); /** Add a value to property description. */ Builder addDescription(Text value); /** Add a value to property description. */ Builder addDescription(String value); /** Add a value to property device. */ Builder addDevice(Text value); /** Add a value to property device. */ Builder addDevice(String value); /** Add a value to property discussionUrl. */ Builder addDiscussionUrl(URL value); /** Add a value to property discussionUrl. */ Builder addDiscussionUrl(String value); /** Add a value to property downloadUrl. */ Builder addDownloadUrl(URL value); /** Add a value to property downloadUrl. */ Builder addDownloadUrl(String value); /** Add a value to property editor. */ Builder addEditor(Person value); /** Add a value to property editor. */ Builder addEditor(Person.Builder value); /** Add a value to property editor. */ Builder addEditor(String value); /** Add a value to property educationalAlignment. */ Builder addEducationalAlignment(AlignmentObject value); /** Add a value to property educationalAlignment. */ Builder addEducationalAlignment(AlignmentObject.Builder value); /** Add a value to property educationalAlignment. */ Builder addEducationalAlignment(String value); /** Add a value to property educationalUse. */ Builder addEducationalUse(Text value); /** Add a value to property educationalUse. */ Builder addEducationalUse(String value); /** Add a value to property encoding. 
*/ Builder addEncoding(MediaObject value); /** Add a value to property encoding. */ Builder addEncoding(MediaObject.Builder value); /** Add a value to property encoding. */ Builder addEncoding(String value); /** Add a value to property encodings. */ Builder addEncodings(MediaObject value); /** Add a value to property encodings. */ Builder addEncodings(MediaObject.Builder value); /** Add a value to property encodings. */ Builder addEncodings(String value); /** Add a value to property exampleOfWork. */ Builder addExampleOfWork(CreativeWork value); /** Add a value to property exampleOfWork. */ Builder addExampleOfWork(CreativeWork.Builder value); /** Add a value to property exampleOfWork. */ Builder addExampleOfWork(String value); /** Add a value to property featureList. */ Builder addFeatureList(Text value); /** Add a value to property featureList. */ Builder addFeatureList(URL value); /** Add a value to property featureList. */ Builder addFeatureList(String value); /** Add a value to property fileFormat. */ Builder addFileFormat(Text value); /** Add a value to property fileFormat. */ Builder addFileFormat(String value); /** Add a value to property fileSize. */ Builder addFileSize(Text value); /** Add a value to property fileSize. */ Builder addFileSize(String value); /** Add a value to property genre. */ Builder addGenre(Text value); /** Add a value to property genre. */ Builder addGenre(URL value); /** Add a value to property genre. */ Builder addGenre(String value); /** Add a value to property hasPart. */ Builder addHasPart(CreativeWork value); /** Add a value to property hasPart. */ Builder addHasPart(CreativeWork.Builder value); /** Add a value to property hasPart. */ Builder addHasPart(String value); /** Add a value to property headline. */ Builder addHeadline(Text value); /** Add a value to property headline. */ Builder addHeadline(String value); /** Add a value to property image. */ Builder addImage(ImageObject value); /** Add a value to property image. 
*/ Builder addImage(ImageObject.Builder value); /** Add a value to property image. */ Builder addImage(URL value); /** Add a value to property image. */ Builder addImage(String value); /** Add a value to property inLanguage. */ Builder addInLanguage(Language value); /** Add a value to property inLanguage. */ Builder addInLanguage(Language.Builder value); /** Add a value to property inLanguage. */ Builder addInLanguage(Text value); /** Add a value to property inLanguage. */ Builder addInLanguage(String value); /** Add a value to property installUrl. */ Builder addInstallUrl(URL value); /** Add a value to property installUrl. */ Builder addInstallUrl(String value); /** Add a value to property interactionStatistic. */ Builder addInteractionStatistic(InteractionCounter value); /** Add a value to property interactionStatistic. */ Builder addInteractionStatistic(InteractionCounter.Builder value); /** Add a value to property interactionStatistic. */ Builder addInteractionStatistic(String value); /** Add a value to property interactivityType. */ Builder addInteractivityType(Text value); /** Add a value to property interactivityType. */ Builder addInteractivityType(String value); /** Add a value to property isBasedOnUrl. */ Builder addIsBasedOnUrl(URL value); /** Add a value to property isBasedOnUrl. */ Builder addIsBasedOnUrl(String value); /** Add a value to property isFamilyFriendly. */ Builder addIsFamilyFriendly(Boolean value); /** Add a value to property isFamilyFriendly. */ Builder addIsFamilyFriendly(String value); /** Add a value to property isPartOf. */ Builder addIsPartOf(CreativeWork value); /** Add a value to property isPartOf. */ Builder addIsPartOf(CreativeWork.Builder value); /** Add a value to property isPartOf. */ Builder addIsPartOf(String value); /** Add a value to property keywords. */ Builder addKeywords(Text value); /** Add a value to property keywords. */ Builder addKeywords(String value); /** Add a value to property learningResourceType. 
*/ Builder addLearningResourceType(Text value); /** Add a value to property learningResourceType. */ Builder addLearningResourceType(String value); /** Add a value to property license. */ Builder addLicense(CreativeWork value); /** Add a value to property license. */ Builder addLicense(CreativeWork.Builder value); /** Add a value to property license. */ Builder addLicense(URL value); /** Add a value to property license. */ Builder addLicense(String value); /** Add a value to property locationCreated. */ Builder addLocationCreated(Place value); /** Add a value to property locationCreated. */ Builder addLocationCreated(Place.Builder value); /** Add a value to property locationCreated. */ Builder addLocationCreated(String value); /** Add a value to property mainEntity. */ Builder addMainEntity(Thing value); /** Add a value to property mainEntity. */ Builder addMainEntity(Thing.Builder value); /** Add a value to property mainEntity. */ Builder addMainEntity(String value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(CreativeWork value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(CreativeWork.Builder value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(URL value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(String value); /** Add a value to property memoryRequirements. */ Builder addMemoryRequirements(Text value); /** Add a value to property memoryRequirements. */ Builder addMemoryRequirements(URL value); /** Add a value to property memoryRequirements. */ Builder addMemoryRequirements(String value); /** Add a value to property mentions. */ Builder addMentions(Thing value); /** Add a value to property mentions. */ Builder addMentions(Thing.Builder value); /** Add a value to property mentions. */ Builder addMentions(String value); /** Add a value to property name. */ Builder addName(Text value); /** Add a value to property name. 
*/ Builder addName(String value); /** Add a value to property offers. */ Builder addOffers(Offer value); /** Add a value to property offers. */ Builder addOffers(Offer.Builder value); /** Add a value to property offers. */ Builder addOffers(String value); /** Add a value to property operatingSystem. */ Builder addOperatingSystem(Text value); /** Add a value to property operatingSystem. */ Builder addOperatingSystem(String value); /** Add a value to property permissions. */ Builder addPermissions(Text value); /** Add a value to property permissions. */ Builder addPermissions(String value); /** Add a value to property position. */ Builder addPosition(Integer value); /** Add a value to property position. */ Builder addPosition(Text value); /** Add a value to property position. */ Builder addPosition(String value); /** Add a value to property potentialAction. */ Builder addPotentialAction(Action value); /** Add a value to property potentialAction. */ Builder addPotentialAction(Action.Builder value); /** Add a value to property potentialAction. */ Builder addPotentialAction(String value); /** Add a value to property processorRequirements. */ Builder addProcessorRequirements(Text value); /** Add a value to property processorRequirements. */ Builder addProcessorRequirements(String value); /** Add a value to property producer. */ Builder addProducer(Organization value); /** Add a value to property producer. */ Builder addProducer(Organization.Builder value); /** Add a value to property producer. */ Builder addProducer(Person value); /** Add a value to property producer. */ Builder addProducer(Person.Builder value); /** Add a value to property producer. */ Builder addProducer(String value); /** Add a value to property provider. */ Builder addProvider(Organization value); /** Add a value to property provider. */ Builder addProvider(Organization.Builder value); /** Add a value to property provider. */ Builder addProvider(Person value); /** Add a value to property provider. 
*/ Builder addProvider(Person.Builder value); /** Add a value to property provider. */ Builder addProvider(String value); /** Add a value to property publication. */ Builder addPublication(PublicationEvent value); /** Add a value to property publication. */ Builder addPublication(PublicationEvent.Builder value); /** Add a value to property publication. */ Builder addPublication(String value); /** Add a value to property publisher. */ Builder addPublisher(Organization value); /** Add a value to property publisher. */ Builder addPublisher(Organization.Builder value); /** Add a value to property publisher. */ Builder addPublisher(Person value); /** Add a value to property publisher. */ Builder addPublisher(Person.Builder value); /** Add a value to property publisher. */ Builder addPublisher(String value); /** Add a value to property publishingPrinciples. */ Builder addPublishingPrinciples(URL value); /** Add a value to property publishingPrinciples. */ Builder addPublishingPrinciples(String value); /** Add a value to property recordedAt. */ Builder addRecordedAt(Event value); /** Add a value to property recordedAt. */ Builder addRecordedAt(Event.Builder value); /** Add a value to property recordedAt. */ Builder addRecordedAt(String value); /** Add a value to property releasedEvent. */ Builder addReleasedEvent(PublicationEvent value); /** Add a value to property releasedEvent. */ Builder addReleasedEvent(PublicationEvent.Builder value); /** Add a value to property releasedEvent. */ Builder addReleasedEvent(String value); /** Add a value to property releaseNotes. */ Builder addReleaseNotes(Text value); /** Add a value to property releaseNotes. */ Builder addReleaseNotes(URL value); /** Add a value to property releaseNotes. */ Builder addReleaseNotes(String value); /** Add a value to property requirements. */ Builder addRequirements(Text value); /** Add a value to property requirements. */ Builder addRequirements(URL value); /** Add a value to property requirements. 
*/ Builder addRequirements(String value); /** Add a value to property review. */ Builder addReview(Review value); /** Add a value to property review. */ Builder addReview(Review.Builder value); /** Add a value to property review. */ Builder addReview(String value); /** Add a value to property reviews. */ Builder addReviews(Review value); /** Add a value to property reviews. */ Builder addReviews(Review.Builder value); /** Add a value to property reviews. */ Builder addReviews(String value); /** Add a value to property sameAs. */ Builder addSameAs(URL value); /** Add a value to property sameAs. */ Builder addSameAs(String value); /** Add a value to property schemaVersion. */ Builder addSchemaVersion(Text value); /** Add a value to property schemaVersion. */ Builder addSchemaVersion(URL value); /** Add a value to property schemaVersion. */ Builder addSchemaVersion(String value); /** Add a value to property screenshot. */ Builder addScreenshot(ImageObject value); /** Add a value to property screenshot. */ Builder addScreenshot(ImageObject.Builder value); /** Add a value to property screenshot. */ Builder addScreenshot(URL value); /** Add a value to property screenshot. */ Builder addScreenshot(String value); /** Add a value to property softwareAddOn. */ Builder addSoftwareAddOn(SoftwareApplication value); /** Add a value to property softwareAddOn. */ Builder addSoftwareAddOn(SoftwareApplication.Builder value); /** Add a value to property softwareAddOn. */ Builder addSoftwareAddOn(String value); /** Add a value to property softwareHelp. */ Builder addSoftwareHelp(CreativeWork value); /** Add a value to property softwareHelp. */ Builder addSoftwareHelp(CreativeWork.Builder value); /** Add a value to property softwareHelp. */ Builder addSoftwareHelp(String value); /** Add a value to property softwareRequirements. */ Builder addSoftwareRequirements(Text value); /** Add a value to property softwareRequirements. 
*/ Builder addSoftwareRequirements(URL value); /** Add a value to property softwareRequirements. */ Builder addSoftwareRequirements(String value); /** Add a value to property softwareVersion. */ Builder addSoftwareVersion(Text value); /** Add a value to property softwareVersion. */ Builder addSoftwareVersion(String value); /** Add a value to property sourceOrganization. */ Builder addSourceOrganization(Organization value); /** Add a value to property sourceOrganization. */ Builder addSourceOrganization(Organization.Builder value); /** Add a value to property sourceOrganization. */ Builder addSourceOrganization(String value); /** Add a value to property storageRequirements. */ Builder addStorageRequirements(Text value); /** Add a value to property storageRequirements. */ Builder addStorageRequirements(URL value); /** Add a value to property storageRequirements. */ Builder addStorageRequirements(String value); /** Add a value to property supportingData. */ Builder addSupportingData(DataFeed value); /** Add a value to property supportingData. */ Builder addSupportingData(DataFeed.Builder value); /** Add a value to property supportingData. */ Builder addSupportingData(String value); /** Add a value to property text. */ Builder addText(Text value); /** Add a value to property text. */ Builder addText(String value); /** Add a value to property thumbnailUrl. */ Builder addThumbnailUrl(URL value); /** Add a value to property thumbnailUrl. */ Builder addThumbnailUrl(String value); /** Add a value to property timeRequired. */ Builder addTimeRequired(Duration value); /** Add a value to property timeRequired. */ Builder addTimeRequired(Duration.Builder value); /** Add a value to property timeRequired. */ Builder addTimeRequired(String value); /** Add a value to property translator. */ Builder addTranslator(Organization value); /** Add a value to property translator. */ Builder addTranslator(Organization.Builder value); /** Add a value to property translator. 
*/ Builder addTranslator(Person value); /** Add a value to property translator. */ Builder addTranslator(Person.Builder value); /** Add a value to property translator. */ Builder addTranslator(String value); /** Add a value to property typicalAgeRange. */ Builder addTypicalAgeRange(Text value); /** Add a value to property typicalAgeRange. */ Builder addTypicalAgeRange(String value); /** Add a value to property url. */ Builder addUrl(URL value); /** Add a value to property url. */ Builder addUrl(String value); /** Add a value to property version. */ Builder addVersion(Number value); /** Add a value to property version. */ Builder addVersion(String value); /** Add a value to property video. */ Builder addVideo(VideoObject value); /** Add a value to property video. */ Builder addVideo(VideoObject.Builder value); /** Add a value to property video. */ Builder addVideo(String value); /** Add a value to property workExample. */ Builder addWorkExample(CreativeWork value); /** Add a value to property workExample. */ Builder addWorkExample(CreativeWork.Builder value); /** Add a value to property workExample. */ Builder addWorkExample(String value); /** Add a value to property detailedDescription. */ Builder addDetailedDescription(Article value); /** Add a value to property detailedDescription. */ Builder addDetailedDescription(Article.Builder value); /** Add a value to property detailedDescription. */ Builder addDetailedDescription(String value); /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification value); /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification.Builder value); /** Add a value to property popularityScore. */ Builder addPopularityScore(String value); /** * Add a value to property. * * @param name The property name. * @param value The value of the property. */ Builder addProperty(String name, SchemaOrgType value); /** * Add a value to property. 
*
   * @param name The property name.
   * @param builder The schema.org object builder for the property value.
   */
  Builder addProperty(String name, Thing.Builder builder);

  /**
   * Add a value to property.
   *
   * @param name The property name.
   * @param value The string value of the property.
   */
  Builder addProperty(String name, String value);

  /** Build a {@link SoftwareApplication} object. */
  SoftwareApplication build();
 }

 /**
  * Returns the value list of property applicationCategory. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getApplicationCategoryList();

 /**
  * Returns the value list of property applicationSubCategory. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getApplicationSubCategoryList();

 /**
  * Returns the value list of property applicationSuite. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getApplicationSuiteList();

 /**
  * Returns the value list of property availableOnDevice. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getAvailableOnDeviceList();

 /**
  * Returns the value list of property countriesNotSupported. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getCountriesNotSupportedList();

 /**
  * Returns the value list of property countriesSupported. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getCountriesSupportedList();

 /**
  * Returns the value list of property device. An empty list is returned if
  * the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getDeviceList();

 /**
  * Returns the value list of property downloadUrl. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getDownloadUrlList();

 /**
  * Returns the value list of property featureList. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getFeatureListList();

 /**
  * Returns the value list of property fileSize. An empty list is returned if
  * the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getFileSizeList();

 /**
  * Returns the value list of property installUrl. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getInstallUrlList();

 /**
  * Returns the value list of property memoryRequirements. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getMemoryRequirementsList();

 /**
  * Returns the value list of property operatingSystem. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getOperatingSystemList();

 /**
  * Returns the value list of property permissions. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getPermissionsList();

 /**
  * Returns the value list of property processorRequirements. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getProcessorRequirementsList();

 /**
  * Returns the value list of property releaseNotes. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getReleaseNotesList();

 /**
  * Returns the value list of property requirements. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getRequirementsList();

 /**
  * Returns the value list of property screenshot. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getScreenshotList();

 /**
  * Returns the value list of property softwareAddOn. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getSoftwareAddOnList();

 /**
  * Returns the value list of property softwareHelp. An empty list is returned
  * if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getSoftwareHelpList();

 /**
  * Returns the value list of property softwareRequirements. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getSoftwareRequirementsList();

 /**
  * Returns the value list of property softwareVersion. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getSoftwareVersionList();

 /**
  * Returns the value list of property storageRequirements. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getStorageRequirementsList();

 /**
  * Returns the value list of property supportingData. An empty list is
  * returned if the property is not set in the current object.
  */
 ImmutableList<SchemaOrgType> getSupportingDataList();
}
oracle/nosql
35,498
kvtest/kvstore-IT/src/main/java/oracle/kv/KVLocalTest.java
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * */ package oracle.kv; import static java.util.concurrent.TimeUnit.SECONDS; import static oracle.kv.KVLocalTestModeBase.TestMode; import static oracle.kv.KVLocalTestModeBase.checkUnixDomainSocketsSupported; import static oracle.kv.KVLocalTestModeBase.getConfigBuilder; import static oracle.kv.impl.util.VersionUtil.getJavaMajorVersion; import static oracle.kv.util.TestUtils.checkException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.Arrays; import java.util.UUID; import java.util.concurrent.CompletableFuture; import oracle.kv.impl.util.FileUtils; import oracle.kv.impl.util.PollCondition; import oracle.kv.impl.util.registry.AsyncControl; import oracle.kv.table.PrimaryKey; import oracle.kv.table.Row; import oracle.kv.table.Table; import oracle.kv.table.TableAPI; import oracle.kv.util.kvlite.KVLite; import org.junit.BeforeClass; import org.junit.Test; /** * Exercise KVLocal. 
*/
/* Increase test timeout to 40 minutes -- test can take 30 minutes.*/
@TestClassTimeoutMillis(40*60*1000)
public class KVLocalTest extends TestBase {

    // True when running on Java 16 or later.
    // NOTE(review): not referenced in the visible portion of this file --
    // presumably consulted by Unix-domain-socket tests; confirm before use.
    private static final boolean java16 = getJavaMajorVersion() >= 16;

    // Set to false to keep each test's root directory after the run, which
    // is useful when debugging test failures
    private static final boolean deleteTestDirOnExit = true;

    // The embedded store under test; stopped and cleared in tearDown
    private KVLocal local;

    // Per-test root directory; deleted in tearDown when deleteTestDirOnExit
    private File testDir;

    // KVLocal requires the async network protocol, so skip the whole class
    // when async is disabled in this test run
    @BeforeClass
    public static void ensureAsyncEnabled() {
        assumeTrue("KVLocal requires async", AsyncControl.serverUseAsync);
    }

    // Quiet console output produced by the embedded store.
    // NOTE(review): does not call super.setUp() -- confirm that is intended
    @Override
    public void setUp() throws Exception {
        suppressSystemError();
        suppressSystemOut();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        /* Cleanup running store */
        if (local != null) {
            try {
                local.stop();
            } catch (Exception e) {
                // Best-effort stop: a failure here must not mask the test
                // result
            }
            local = null;
        }
        resetSystemError();
        resetSystemOut();
        if (deleteTestDirOnExit && (testDir != null)) {
            FileUtils.deleteDirectory(testDir);
        }
    }

    // Verify argument validation, default values, and explicit settings of
    // KVLocalConfig.InetBuilder
    @Test
    public void testInetBuilder() throws Exception {
        // Invalid root directory
        checkException(() -> new KVLocalConfig.InetBuilder(""),
                       IllegalArgumentException.class,
                       "The root directory cannot be null or empty");
        checkException(() -> new KVLocalConfig.InetBuilder(null),
                       IllegalArgumentException.class,
                       "The root directory cannot be null or empty");
        final KVLocalConfig.Builder builder =
            new KVLocalConfig.InetBuilder("kvrootDir");
        // Invalid individual settings
        checkException(() -> builder.setStoreName(""),
                       IllegalArgumentException.class,
                       "Store name cannot be null or empty");
        checkException(() -> builder.setStoreName(null),
                       IllegalArgumentException.class,
                       "Store name cannot be null or empty");
        checkException(() -> builder.setHostName(""),
                       IllegalArgumentException.class,
                       "Host name cannot be null or empty");
        checkException(() -> builder.setHostName(null),
                       IllegalArgumentException.class,
                       "Host name cannot be null or empty");
        checkException(() -> builder.setPort(-1),
                       IllegalArgumentException.class,
                       "Illegal port");
        checkException(() -> builder.setMemoryMB(0),
                       IllegalArgumentException.class,
                       "Memory size must not be less");
        checkException(() -> builder.setStorageGB(-1),
                       IllegalArgumentException.class,
                       "Illegal storage directory size");

        /* Verify default values */
        KVLocalConfig config =
            new KVLocalConfig.InetBuilder("kvrootDir").build();
        assertEquals(5000, config.getPort());
        assertTrue(config.isSecure());
        assertEquals("kvstore", config.getStoreName());
        assertEquals("localhost", config.getHostName());
        assertEquals(8192, config.getMemoryMB());
        assertEquals(10, config.getStorageGB());
        assertEquals("kvrootDir", config.getRootDirectory());
        assertFalse(config.isUnixDomain());
        assertEquals("<KVLocalConfig kvroot=kvrootDir storeName=kvstore " +
                     "hostName=localhost memoryMB=8192 storageGB=10 " +
                     "port=5000 isSecure=true isUnixDomain=false>",
                     config.toString());

        // Verify that explicitly configured values are all reported back
        config = new KVLocalConfig.InetBuilder("anotherRootDir")
            .setPort(3000)
            .isSecure(false)
            .setStoreName("anotherStore")
            .setHostName("anotherHost")
            .setStorageGB(20)
            .setMemoryMB(256)
            .build();
        assertEquals(3000, config.getPort());
        assertFalse(config.isSecure());
        assertEquals("anotherStore", config.getStoreName());
        assertEquals("anotherHost", config.getHostName());
        assertEquals(256, config.getMemoryMB());
        assertEquals(20, config.getStorageGB());
        assertEquals("anotherRootDir", config.getRootDirectory());
        assertFalse(config.isUnixDomain());
    }

    // Verify argument validation, default values, and explicit settings of
    // KVLocalConfig.UnixDomainBuilder
    @Test
    public void testUnixDomainBuilder() throws Exception {
        // Invalid root directory
        checkException(() -> new KVLocalConfig.UnixDomainBuilder(""),
                       IllegalArgumentException.class,
                       "The root directory cannot be null or empty");
        checkException(() -> new KVLocalConfig.UnixDomainBuilder(null),
                       IllegalArgumentException.class,
                       "The root directory cannot be null or empty");
        final KVLocalConfig.Builder builder =
            new KVLocalConfig.UnixDomainBuilder("kvrootDir");
        checkException(() -> builder.setStoreName(""),
                       IllegalArgumentException.class,
                       "Store name cannot be null or empty");
        checkException(() -> builder.setStoreName(null),
                       IllegalArgumentException.class,
                       "Store name cannot be null or empty");
        // Host name, port, and security are fixed for Unix domain sockets,
        // so the corresponding setters are unsupported
        checkException(() -> builder.setHostName("host"),
                       UnsupportedOperationException.class,
                       "UnixDomainBuilder does not support setHostName");
        checkException(() -> builder.setPort(6000),
                       UnsupportedOperationException.class,
                       "UnixDomainBuilder does not support setPort");
        checkException(() -> builder.isSecure(false),
                       UnsupportedOperationException.class,
                       "UnixDomainBuilder does not support isSecure");
        checkException(() -> builder.setMemoryMB(0),
                       IllegalArgumentException.class,
                       "Memory size must not be less");
        checkException(() -> builder.setStorageGB(-1),
                       IllegalArgumentException.class,
                       "Illegal storage directory size");

        /* Verify default values */
        KVLocalConfig config =
            new KVLocalConfig.UnixDomainBuilder("kvrootDir").build();
        assertEquals(5000, config.getPort());
        assertFalse(config.isSecure());
        assertEquals("kvstore", config.getStoreName());
        assertEquals("unix_domain:kvrootDir/sockets/sock",
                     config.getHostName());
        assertEquals(8192, config.getMemoryMB());
        assertEquals(10, config.getStorageGB());
        assertEquals("kvrootDir", config.getRootDirectory());
        assertTrue(config.isUnixDomain());
        assertEquals("<KVLocalConfig kvroot=kvrootDir storeName=kvstore " +
                     "memoryMB=8192 storageGB=10 isUnixDomain=true>",
                     config.toString());

        // Verify that explicitly configured values are all reported back
        config = new KVLocalConfig.UnixDomainBuilder("anotherRootDir")
            .setStoreName("anotherStore")
            .setStorageGB(20)
            .setMemoryMB(256)
            .build();
        assertEquals(5000, config.getPort());
        assertFalse(config.isSecure());
        assertEquals("anotherStore", config.getStoreName());
        assertEquals("unix_domain:anotherRootDir/sockets/sock",
                     config.getHostName());
        assertEquals(256, config.getMemoryMB());
        assertEquals(20, config.getStorageGB());
        assertEquals("anotherRootDir", config.getRootDirectory());
        assertTrue(config.isUnixDomain());
    }

    @Test
    public void testStartInetSecure() throws Exception {
        testStart(TestMode.INET_SECURE);
    }

    @Test
    public void testStartInetNonsecure() throws Exception {
        testStart(TestMode.INET_NONSECURE);
    }

    @Test
    public void testStartUnixDomain() throws Exception {
        // Skips the test when the platform/Java version lacks Unix domain
        // socket support
        checkUnixDomainSocketsSupported(TestMode.UNIX_DOMAIN);
testStart(TestMode.UNIX_DOMAIN);
    }

    // Exercise KVLocal.start(): basic start/stop, double start, stopping a
    // non-embedded handle, and rejection of configurations that conflict
    // with an existing store's settings
    private void testStart(TestMode testMode) throws Exception {
        String rootDir = makeTestDir("start");
        /* The KVLocalConfig cannot be null */
        checkException(() -> KVLocal.start(null),
                       IllegalArgumentException.class,
                       "The KVLocalConfig cannot be null");
        final KVLocalConfig config = getConfigBuilder(rootDir, testMode)
            .build();
        local = KVLocal.start(config);
        assertTrue(local.isRunning());
        /* 2nd start() should fail */
        checkException(() -> KVLocal.start(config),
                       IllegalStateException.class,
                       "Only one instance of embedded NoSQL database" +
                       " can be running");
        local.stop();
        assertTrue(!local.isRunning());
        /* Stop an already stopped KVLocal */
        local.stop();
        /* Stop a KVLocal that has no embedded store */
        {
            KVLocal local2 = KVLocal.getKVLocal(rootDir);
            checkException(() -> local2.stop(),
                           IllegalStateException.class,
                           "not started as an embedded instance");
        }
        /* Wrong root dir */
        {
            String rootDir2 = makeTestDir("start2");
            FileUtils.copyDir(new File(rootDir), new File(rootDir2));
            KVLocalConfig config2 = getConfigBuilder(rootDir2, testMode)
                .build();
            checkException(
                () -> KVLocal.start(config2),
                IllegalArgumentException.class,
                "does not match the existing store's root directory");
        }
        /* Wrong store name */
        {
            KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                .setStoreName("another-store")
                .build();
            checkException(() -> KVLocal.start(config2),
                           IllegalArgumentException.class,
                           "does not match the existing store's storename");
        }
        /* KVLocalConfig port doesn't match existing store's port. */
        {
            // Not applicable to Unix domain sockets, which do not use a port
            if (testMode != TestMode.UNIX_DOMAIN) {
                KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                    .setPort(6000)
                    .build();
                checkException(() -> KVLocal.start(config2),
                               IllegalArgumentException.class,
                               "does not match the existing store's port");
            }
        }
        /* KVLocalConfig hostName doesn't match existing store's hostName.
         */
        {
            if (testMode != TestMode.UNIX_DOMAIN) {
                KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                    .setHostName("kvstoreException")
                    .build();
                checkException(() -> KVLocal.start(config2),
                               IllegalArgumentException.class,
                               "does not match the existing store's hostname");
            }
        }
        /* KVLocalConfig security doesn't match existing store's security. */
        {
            if (testMode == TestMode.INET_SECURE) {
                KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                    .isSecure(false)
                    .build();
                checkException(() -> KVLocal.start(config2),
                               IllegalArgumentException.class,
                               "does not match the existing store, " +
                               "which has security enabled");
            }
        }
        /* Wrong memoryMB */
        {
            KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                .setMemoryMB(12345)
                .build();
            checkException(() -> KVLocal.start(config2),
                           IllegalArgumentException.class,
                           "does not match the existing store's memoryMB");
        }
        /* Wrong storageGB */
        {
            KVLocalConfig config2 = getConfigBuilder(rootDir, testMode)
                .setStorageGB(42)
                .build();
            checkException(() -> KVLocal.start(config2),
                           IllegalArgumentException.class,
                           "does not match the existing store's storageGB");
        }
        /* Root directory's parent directory does not exist
         * Note: the directory "/nonexist/parentdir" should not exist in
         * the machine where this test script is run.
         */
        {
            KVLocalConfig config2 =
                getConfigBuilder("/nonexist/parentdir/rootdir", testMode)
                .build();
            checkException(() -> KVLocal.start(config2),
                           KVLocalException.class,
                           "Problem creating root directory");
        }
        // Store was already stopped above; clear the field so tearDown does
        // not try to stop it again
        local = null;
    }

    @Test
    public void testStartExistingStoreInetSecure() throws Exception {
        testStartExistingStore(TestMode.INET_SECURE);
    }

    @Test
    public void testStartExistingStoreInetNonsecure() throws Exception {
        testStartExistingStore(TestMode.INET_NONSECURE);
    }

    @Test
    public void testStartExistingStoreUnixDomain() throws Exception {
        checkUnixDomainSocketsSupported(TestMode.UNIX_DOMAIN);
        testStartExistingStore(TestMode.UNIX_DOMAIN);
    }

    // Exercise KVLocal.startExistingStore() against a previously created
    // store and against missing or never-initialized root directories
    private void testStartExistingStore(TestMode testMode)
        throws IOException {
        String rootDirStr = makeTestDir("startExist");
        /* startExistingStore() should fail if no store directory was found */
        // NOTE(review): no fail() call if startExistingStore unexpectedly
        // succeeds here -- confirm whether that is intended
        try {
            KVLocal.startExistingStore(rootDirStr);
        } catch(IllegalStateException e) {
            assertTrue(e.getMessage(),
                       e.getMessage().contains
                       ("Configuration file of the existing store was not found"));
        }
        KVLocalConfig config = getConfigBuilder(rootDirStr, testMode)
            .build();
        local = KVLocal.start(config);
        local.stop();
        /* 2nd startExistingStore() should fail */
        local = KVLocal.startExistingStore(rootDirStr);
        try {
            KVLocal.startExistingStore(rootDirStr);
            fail("2nd startExistingStore didn't fail");
        } catch(IllegalStateException e) {
            assertTrue(e.getMessage(),
                       e.getMessage()
                       .contains("Only one instance of embedded NoSQL database" +
                                 " can be running"));
        }
        local.stop();
        /* startExistingStore() should set KVLocal configuration parameters
         * correctly */
        local = KVLocal.startExistingStore(rootDirStr);
        local.stop();
        /* No root directory exists */
        String nonExistentRoot = makeTestDir("non-existent");
        try {
            KVLocal.startExistingStore(nonExistentRoot);
            fail("startExistingStore() didn't fail when no root directory" +
                 " was found");
        } catch(IllegalStateException e) {
            assertTrue(e.getMessage(),
                       e.getMessage().contains
                       ("Configuration file of the existing store was not
found")); } /* Root directory is empty */ File rootDir = new File(nonExistentRoot); rootDir.mkdir(); try { KVLocal.startExistingStore(nonExistentRoot); fail("startExistingStore() didn't fail when root directory" + " was empty"); } catch(IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Configuration file of the existing store was not found")); } /* start() should reuse the empty root directory */ config = getConfigBuilder(nonExistentRoot, testMode) .build(); local = KVLocal.start(config); local.stop(); /* startExistingStore() should fail if kvroot/kvstore directory * is missing */ File kvstorePath = new File(nonExistentRoot+File.separator+"kvstore"); FileUtils.deleteDirectory(kvstorePath); checkException(() -> KVLocal.startExistingStore(nonExistentRoot), IllegalStateException.class, "Store directory was not found"); local = null; } @Test public void testSecurity() throws Exception { String rootDir = makeTestDir("secure"); KVLocalConfig config1 = new KVLocalConfig.InetBuilder(rootDir) .isSecure(true) .build(); local = KVLocal.start(config1); loadData(UUID.randomUUID().toString()); local.verifyConfiguration(false); local.stop(); local = KVLocal.startExistingStore(rootDir); local.stop(); rootDir = makeTestDir("nonsecure"); KVLocalConfig config2 = new KVLocalConfig.InetBuilder(rootDir) .isSecure(false) .build(); local = KVLocal.start(config2); loadData(UUID.randomUUID().toString()); local.verifyConfiguration(false); local.stop(); local = KVLocal.startExistingStore(rootDir); local.stop(); local = null; } @Test public void testDiagnosticToolInetSecure() throws Exception { testDiagnosticTool(TestMode.INET_SECURE); } @Test public void testDiagnosticToolInetNonsecure() throws Exception { testDiagnosticTool(TestMode.INET_NONSECURE); } @Test public void testDiagnosticToolUnixDomain() throws Exception { checkUnixDomainSocketsSupported(TestMode.UNIX_DOMAIN); testDiagnosticTool(TestMode.UNIX_DOMAIN); } private void testDiagnosticTool(TestMode 
testMode) throws Exception { String rootDir = makeTestDir("diagnostic"); KVLocalConfig config = getConfigBuilder(rootDir, testMode).build(); local = KVLocal.start(config); KVLocal handle = KVLocal.getKVLocal(rootDir); String verifyVerbose = handle.verifyConfiguration(true); assertTrue(verifyVerbose.contains("Operation ends successfully")); String verifyNonVerbose = handle.verifyConfiguration(false); assertTrue(verifyNonVerbose .equals("{}")); String verifyDataRes = handle.verifyData(); assertTrue(verifyDataRes.contains("No Btree Corruptions")); assertTrue(verifyDataRes.contains("No Log File Corruptions")); local.stop(); /* Verify configuration when store is not running. */ try { handle.verifyConfiguration(true); fail("verifyConfiguration executed when store is not running"); } catch(KVLocalException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Exception in getting admin command service," + " maybe the store is not running")); } /* Verify data when store is not running. */ try { handle.verifyData(); fail("verifyData executed when store is not running"); } catch(KVLocalException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Exception in getting admin command service," + " maybe the store is not running")); } local = null; } @Test public void testSnapshotInetSecure() throws Exception { testSnapshot(TestMode.INET_SECURE); } @Test public void testSnapshotInetNonsecure() throws Exception { testSnapshot(TestMode.INET_NONSECURE); } @Test public void testSnapshotUnixDomain() throws Exception { checkUnixDomainSocketsSupported(TestMode.UNIX_DOMAIN); testSnapshot(TestMode.UNIX_DOMAIN); } private void testSnapshot(TestMode testMode) throws Exception { String rootDir = makeTestDir("testSnapshot"); KVLocalConfig config = getConfigBuilder(rootDir, testMode).build(); local = KVLocal.start(config); String randomBefore = UUID.randomUUID().toString(); loadData(randomBefore); String snapshotName1 = local.createSnapshot("sp1"); String randomAfter = 
UUID.randomUUID().toString(); loadData(randomAfter); String[] snapshotNames = local.listSnapshots(); assertTrue(snapshotNames[0].equals(snapshotName1)); local.stop(); /* * Restore from snapshot. Verify the following: * The data created before snapshot should exist. * The data created after snapshot should not exist. */ local = KVLocal.restoreFromSnapshot(rootDir, snapshotName1); assertTrue(verifyDataExisted(randomBefore)); assertFalse(verifyDataExisted(randomAfter)); /* Restore from a snapshot when store is already running */ try { KVLocal.restoreFromSnapshot(rootDir, snapshotName1); fail("Restore from Snapshot when there is an already running" + " store"); } catch(IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage() .contains("Only one instance of embedded NoSQL database" + " can be running")); } local.stop(); /* Restore from a non-existent snapshot */ checkException(() -> KVLocal.restoreFromSnapshot( rootDir, "non-exist-snapshot"), IllegalStateException.class, "Snapshot directory not found"); /* * Start store again. Verify the following: * The data created before snapshot should exist. * The data created after snapshot should not exist. */ local = KVLocal.start(config); assertTrue(verifyDataExisted(randomBefore)); assertFalse(verifyDataExisted(randomAfter)); /* Remove snapshot. 
*/ local.removeSnapshot(snapshotName1); snapshotNames = local.listSnapshots(); assertTrue(snapshotNames.length == 0); /* Remove a non-existent snapshot should complete with no error */ local.removeSnapshot(snapshotName1); local.stop(); /* Create snapshot when store is not running */ { KVLocal localFinal = local; checkException(() -> localFinal.createSnapshot("sp2"), KVLocalException.class, "maybe the store is not running"); } local = null; } @Test public void testGetStoreInetSecure() throws Exception { testGetStore(TestMode.INET_SECURE); } @Test public void testGetStoreInetNonsecure() throws Exception { testGetStore(TestMode.INET_NONSECURE); } @Test public void testGetStoreUnixDomain() throws Exception { checkUnixDomainSocketsSupported(TestMode.UNIX_DOMAIN); testGetStore(TestMode.UNIX_DOMAIN); } private void testGetStore(TestMode testMode) throws Exception { String rootDir = makeTestDir("getStore"); KVLocalConfig config1 = getConfigBuilder(rootDir, testMode).build(); local = KVLocal.start(config1); KVStore storeHandle1 = local.getStore(); testKeyValueAPI(storeHandle1, UUID.randomUUID().toString()); TableAPI tableAPI1 = local.getStore().getTableAPI(); testTableAPI(tableAPI1, UUID.randomUUID().toString()); /* stop store and start it again, the store handle and tableAPI handle * should still work */ local.stop(); local = KVLocal.start(config1); testKeyValueAPI(storeHandle1, UUID.randomUUID().toString()); testTableAPI(tableAPI1, UUID.randomUUID().toString()); /* * Close store via KVStore.close() API. The storeHandle1 should * signal error. */ storeHandle1.close(); try { testKeyValueAPI(storeHandle1, UUID.randomUUID().toString()); } catch (IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Request dispatcher has been shutdown")); } local.closeStore(); local.stop(); /* Invoke getStore when store is not running. */ checkException(() -> local.getStore(), FaultException.class, (testMode == TestMode.INET_SECURE) ? 
"Could not establish an initial login" : "Could not contact any RepNode"); /* Invoke key/value API when store is not running */ try { testKeyValueAPI(storeHandle1, UUID.randomUUID().toString()); fail("key/value API didn't fail"); } catch(IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Request dispatcher has been shutdown")); } /* Invoke TableAPI when KVLocal is not running. */ try { testTableAPI(tableAPI1, UUID.randomUUID().toString()); fail("TableAPI didn't fail"); } catch(IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage().contains ("Request dispatcher has been shutdown")); } rootDir = makeTestDir("getStore2"); /* Configure a store in an incompatible mode */ final TestMode otherMode; switch (testMode) { case INET_SECURE: otherMode = TestMode.INET_NONSECURE; break; case INET_NONSECURE: otherMode = java16 ? TestMode.UNIX_DOMAIN : TestMode.INET_SECURE; break; case UNIX_DOMAIN: otherMode = TestMode.INET_SECURE; break; default: throw new AssertionError(); } KVLocalConfig config2 = getConfigBuilder(rootDir, otherMode).build(); local = KVLocal.start(config2); KVStore storeHandle2 = local.getStore(); testKeyValueAPI(storeHandle2, UUID.randomUUID().toString()); TableAPI tableAPI2 = local.getStore().getTableAPI(); testTableAPI(tableAPI2, UUID.randomUUID().toString()); local.closeStore(); local.stop(); /* Invoke getStore when store is not running. */ checkException(() -> local.getStore(), FaultException.class, (otherMode == TestMode.INET_SECURE) ? "Could not establish an initial login" : "Could not contact any RepNode"); /* Invoke closeStore when store is not running. */ storeHandle2.close(); storeHandle2.close(); local = null; } /** * Test starting KVLocal when there is an existing KVLite running on the * same port. 
*/ @Test public void testGetStoreRunningKVLite() throws Exception { final String rootDir = makeTestDir("getStoreRunningKVLite"); /* * Use a different store name because it seems that KVLite modifies the * default client socket factory otherwise, which causes trouble for * KVLocal */ final String storeName = "kvlite"; /* Create a separate KVLite */ final KVLite kvlite = new KVLite( rootDir, storeName, KVLocalConfig.DEFAULT_PORT, true /* runBootAdmin */, KVLocalConfig.InetBuilder.DEFAULT_HOSTNAME, null /* haPortRange */, null /* servicePortRange */, 1 /* numPartitions */, null /* mountPoint */, true /* useThreads */, true /* isSecure */, null /* restoreSnapshotName */); kvlite.setMemoryMB(KVLocalConfig.DEFAULT_MEMORY_SIZE_MB); kvlite.setStorageSizeGB(KVLocalConfig.DEFAULT_STORAGE_SIZE_GB); kvlite.setVerbose(false); /* Start it and wait for it to be ready */ tearDowns.add(() -> kvlite.stop(false)); kvlite.start(true /* waitForServices */); /* Wait for the user security file to be created */ final File securityPath = new File(KVLocal.getSecurityPath(rootDir)); assertTrue(PollCondition.await(1000, 30000, securityPath::exists)); /* Create KVLocal, but don't start it, and get KVStore */ final KVLocal kvlocal = KVLocal.getKVLocal(rootDir); /* Wait for the store to be working */ assertTrue( PollCondition.await( 1000, 30000, () -> { try { return KVLocal.isStoreReady(kvlocal.getStore()); } catch (IllegalStateException e) { return false; } })); /* * Try starting KVLocal, should fail more quickly than the 100 second * start up timeout because it should detect that the service is not * coming up. 
*/ final KVLocalConfig config = new KVLocalConfig.InetBuilder(rootDir) .setStoreName(storeName) .build(); CompletableFuture.runAsync( () -> checkException(() -> KVLocal.start(config), KVLocalException.class, "Service was not started")) .get(30, SECONDS); /* Try with a KVLocal in another directory */ final String rootDir2 = makeTestDir("getStoreRunningKVLite2"); final KVLocalConfig config2 = new KVLocalConfig.InetBuilder(rootDir2).build(); CompletableFuture.runAsync( () -> checkException(() -> KVLocal.start(config2), KVLocalException.class, "Service was not started")) .get(30, SECONDS); } /* Basic test */ @Test public void testUnixDomainSocket() throws Exception { final String rootDir = makeTestDir("unixDomain"); final KVLocalConfig config = new KVLocalConfig.UnixDomainBuilder(rootDir).build(); if (!java16) { checkException(() -> KVLocal.start(config), IllegalStateException.class, "Starting KVLocal using Unix domain sockets" + " requires Java 16"); } else { local = KVLocal.start(config); } } /** Test using a too-long directory name [KVSTORE-1478] */ @Test public void testUnixDomainSocketDirTooLong() throws IOException { /* * Create a test directory name that is long enough to exceed the Unix * domain socket pathname limit -- about 100 -- but not too long that * the file system can't create the directory. 
*/ final char[] chars = new char[100]; Arrays.fill(chars, 'a'); final String rootDir = makeTestDir("long-" + new String(chars)); final KVLocalConfig config = new KVLocalConfig.UnixDomainBuilder(rootDir).build(); if (!java16) { checkException(() -> KVLocal.start(config), IllegalStateException.class, "Starting KVLocal using Unix domain sockets" + " requires Java 16"); } else { checkException(() -> { local = KVLocal.start(config); }, KVLocalException.class, "path too long"); } } private void testKeyValueAPI(KVStore store, String random) throws Exception { final Key key = Key.createKey("key: " + random); final String valueString = "value: " + random; final Value value = Value.createValue(valueString.getBytes()); store.put(key, value); final ValueVersion valueVersion = store.get(key); assertEquals(value, valueVersion.getValue()); String statement = "CREATE TABLE if not exists employee (" + "id STRING, " + "firstName STRING, " + "lastName STRING, " + "PRIMARY KEY (id))"; ExecutionFuture future = store.execute(statement); StatementResult statementRes = future.get(); if (future != null) { assertTrue(future.isDone()); assertFalse(future.isCancelled()); } assertTrue(statementRes.toString(), statementRes.isSuccessful()); assertTrue(statementRes.isDone()); } private void testTableAPI(TableAPI tableAPI, String random) { Table employee = tableAPI.getTable("employee"); Row row = employee.createRow(); row.put("id", random); row.put("firstName", "firstName"+random); row.put("lastName", "lastName"+random); tableAPI.put(row, null, null); final PrimaryKey key = employee.createPrimaryKey(); key.put("id", random); final String value = tableAPI.get(key, null).get("firstName") .asString().get(); assertEquals("firstName" + random, value); } private void loadData(String random) throws Exception { KVStore store = local.getStore(); testKeyValueAPI(store, random); TableAPI tableAPI = local.getStore().getTableAPI(); testTableAPI(tableAPI, random); local.closeStore(); } /** * Verify whether the 
employee record with the specified key existed. * Return true if record exists, false otherwise. */ private boolean verifyDataExisted(String key) throws Exception { TableAPI tableAPI = local.getStore().getTableAPI(); Table employee = tableAPI.getTable("employee"); final PrimaryKey primaryKey = employee.createPrimaryKey(); primaryKey.put("id", key); final Row row = tableAPI.get(primaryKey, null); return (row != null); } private String makeTestDir(String subDir) throws IOException { testDir = Files.createTempDirectory(subDir).toFile(); return testDir.toString(); } }
googleapis/google-cloud-java
34,748
java-video-intelligence/proto-google-cloud-video-intelligence-v1p2beta1/src/main/java/com/google/cloud/videointelligence/v1p2beta1/VideoIntelligenceServiceProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1p2beta1; public final class VideoIntelligenceServiceProto { private VideoIntelligenceServiceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_fieldAccessorTable; static final 
com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_Entity_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + "Agoogle/cloud/videointelligence/v1p2beta1/video_intelligence.proto\022(google.clou" + "d.videointelligence.v1p2beta1\032\034google/ap" + "i/annotations.proto\032\027google/api/client.p" + "roto\032\037google/api/field_behavior.proto\032#g" + "oogle/longrunning/operations.proto\032\036goog" + "le/protobuf/duration.proto\032\037google/proto" + "buf/timestamp.proto\032\027google/rpc/status.proto\"\214\002\n" + "\024AnnotateVideoRequest\022\021\n" + "\tinput_uri\030\001 \001(\t\022\025\n\r" + "input_content\030\006 \001(\014\022H\n" + "\010features\030\002" + " \003(\01621.google.cloud.videointelligence.v1p2beta1.FeatureB\003\340A\002\022M\n\r" + "video_context\030\003" + " 
\001(\01326.google.cloud.videointelligence.v1p2beta1.VideoContext\022\027\n\n" + "output_uri\030\004 \001(\tB\003\340A\001\022\030\n" + "\013location_id\030\005 \001(\tB\003\340A\001\"\366\003\n" + "\014VideoContext\022H\n" + "\010segments\030\001 \003(\01326.google." + "cloud.videointelligence.v1p2beta1.VideoSegment\022^\n" + "\026label_detection_config\030\002 \001(\0132>" + ".google.cloud.videointelligence.v1p2beta1.LabelDetectionConfig\022i\n" + "\034shot_change_detection_config\030\003 \001(\0132C.google.cloud.vide" + "ointelligence.v1p2beta1.ShotChangeDetectionConfig\022s\n" + "!explicit_content_detection_config\030\004 \001(\0132H.google.cloud.videointelli" + "gence.v1p2beta1.ExplicitContentDetectionConfig\022\\\n" + "\025text_detection_config\030\010 \001(\0132=." + "google.cloud.videointelligence.v1p2beta1.TextDetectionConfig\"\234\001\n" + "\024LabelDetectionConfig\022Z\n" + "\024label_detection_mode\030\001 \001(\0162<.go" + "ogle.cloud.videointelligence.v1p2beta1.LabelDetectionMode\022\031\n" + "\021stationary_camera\030\002 \001(\010\022\r\n" + "\005model\030\003 \001(\t\"*\n" + "\031ShotChangeDetectionConfig\022\r\n" + "\005model\030\001 \001(\t\"/\n" + "\036ExplicitContentDetectionConfig\022\r\n" + "\005model\030\001 \001(\t\"-\n" + "\023TextDetectionConfig\022\026\n" + "\016language_hints\030\001 \003(\t\"x\n" + "\014VideoSegment\0224\n" + "\021start_time_offset\030\001 \001(\0132\031.google.protobuf.Duration\0222\n" + "\017end_time_offset\030\002 \001(\0132\031.google.protobuf.Duration\"k\n" + "\014LabelSegment\022G\n" + "\007segment\030\001 \001(\01326.goo" + "gle.cloud.videointelligence.v1p2beta1.VideoSegment\022\022\n\n" + "confidence\030\002 \001(\002\"P\n\n" + "LabelFrame\022.\n" + "\013time_offset\030\001 \001(\0132\031.google.protobuf.Duration\022\022\n\n" + "confidence\030\002 \001(\002\"G\n" + "\006Entity\022\021\n" + "\tentity_id\030\001 \001(\t\022\023\n" + "\013description\030\002 \001(\t\022\025\n\r" + "language_code\030\003 
\001(\t\"\260\002\n" + "\017LabelAnnotation\022@\n" + "\006entity\030\001 \001(\01320.google.cloud.videointelligence.v1p2beta1.Entity\022K\n" + "\021category_entities\030\002" + " \003(\01320.google.cloud.videointelligence.v1p2beta1.Entity\022H\n" + "\010segments\030\003" + " \003(\01326.google.cloud.videointelligence.v1p2beta1.LabelSegment\022D\n" + "\006frames\030\004 \003(\01324" + ".google.cloud.videointelligence.v1p2beta1.LabelFrame\"\234\001\n" + "\024ExplicitContentFrame\022.\n" + "\013time_offset\030\001 \001(\0132\031.google.protobuf.Duration\022T\n" + "\026pornography_likelihood\030\002 \001(\01624." + "google.cloud.videointelligence.v1p2beta1.Likelihood\"k\n" + "\031ExplicitContentAnnotation\022N\n" + "\006frames\030\001 \003(\0132>.google.cloud.videoint" + "elligence.v1p2beta1.ExplicitContentFrame\"Q\n" + "\025NormalizedBoundingBox\022\014\n" + "\004left\030\001 \001(\002\022\013\n" + "\003top\030\002 \001(\002\022\r\n" + "\005right\030\003 \001(\002\022\016\n" + "\006bottom\030\004 \001(\002\"\313\005\n" + "\026VideoAnnotationResults\022\021\n" + "\tinput_uri\030\001 \001(\t\022\\\n" + "\031segment_label_annotations\030\002" + " \003(\01329.google.cloud.videointelligence.v1p2beta1.LabelAnnotation\022Y\n" + "\026shot_label_annotations\030\003" + " \003(\01329.google.cloud.videointelligence.v1p2beta1.LabelAnnotation\022Z\n" + "\027frame_label_annotations\030\004 \003(\01329.google.clo" + "ud.videointelligence.v1p2beta1.LabelAnnotation\022P\n" + "\020shot_annotations\030\006 \003(\01326.googl" + "e.cloud.videointelligence.v1p2beta1.VideoSegment\022`\n" + "\023explicit_annotation\030\007 \001(\0132C." 
+ "google.cloud.videointelligence.v1p2beta1.ExplicitContentAnnotation\022R\n" + "\020text_annotations\030\014" + " \003(\01328.google.cloud.videointelligence.v1p2beta1.TextAnnotation\022^\n" + "\022object_annotations\030\016 \003(\0132B.google.cloud.videoi" + "ntelligence.v1p2beta1.ObjectTrackingAnnotation\022!\n" + "\005error\030\t \001(\0132\022.google.rpc.Status\"u\n" + "\025AnnotateVideoResponse\022\\\n" + "\022annotation_results\030\001 \003(\0132@.google.cloud.videointel" + "ligence.v1p2beta1.VideoAnnotationResults\"\247\001\n" + "\027VideoAnnotationProgress\022\021\n" + "\tinput_uri\030\001 \001(\t\022\030\n" + "\020progress_percent\030\002 \001(\005\022.\n\n" + "start_time\030\003 \001(\0132\032.google.protobuf.Timestamp\022/\n" + "\013update_time\030\004 \001(\0132\032.google.protobuf.Timestamp\"w\n" + "\025AnnotateVideoProgress\022^\n" + "\023annotation_progress\030\001 \003(\0132A.google.cloud." + "videointelligence.v1p2beta1.VideoAnnotationProgress\"(\n" + "\020NormalizedVertex\022\t\n" + "\001x\030\001 \001(\002\022\t\n" + "\001y\030\002 \001(\002\"f\n" + "\026NormalizedBoundingPoly\022L\n" + "\010vertices\030\001 \003(\0132:.google.cloud.videoin" + "telligence.v1p2beta1.NormalizedVertex\"\257\001\n" + "\013TextSegment\022G\n" + "\007segment\030\001 \001(\01326.google." 
+ "cloud.videointelligence.v1p2beta1.VideoSegment\022\022\n\n" + "confidence\030\002 \001(\002\022C\n" + "\006frames\030\003 \003" + "(\01323.google.cloud.videointelligence.v1p2beta1.TextFrame\"\233\001\n" + "\tTextFrame\022^\n" + "\024rotated_bounding_box\030\001 \001(\0132@.google.cloud.video" + "intelligence.v1p2beta1.NormalizedBoundingPoly\022.\n" + "\013time_offset\030\002 \001(\0132\031.google.protobuf.Duration\"g\n" + "\016TextAnnotation\022\014\n" + "\004text\030\001 \001(\t\022G\n" + "\010segments\030\002 \003(\01325.google.cloud.v" + "ideointelligence.v1p2beta1.TextSegment\"\247\001\n" + "\023ObjectTrackingFrame\022`\n" + "\027normalized_bounding_box\030\001 \001(\0132?.google.cloud.videointe" + "lligence.v1p2beta1.NormalizedBoundingBox\022.\n" + "\013time_offset\030\002 \001(\0132\031.google.protobuf.Duration\"\254\002\n" + "\030ObjectTrackingAnnotation\022I\n" + "\007segment\030\003" + " \001(\01326.google.cloud.videointelligence.v1p2beta1.VideoSegmentH\000\022\022\n" + "\010track_id\030\005 \001(\003H\000\022@\n" + "\006entity\030\001 \001(\01320.google.cl" + "oud.videointelligence.v1p2beta1.Entity\022\022\n\n" + "confidence\030\004 \001(\002\022M\n" + "\006frames\030\002 \003(\0132=.goo" + "gle.cloud.videointelligence.v1p2beta1.ObjectTrackingFrameB\014\n\n" + "track_info*\233\001\n" + "\007Feature\022\027\n" + "\023FEATURE_UNSPECIFIED\020\000\022\023\n" + "\017LABEL_DETECTION\020\001\022\031\n" + "\025SHOT_CHANGE_DETECTION\020\002\022\036\n" + "\032EXPLICIT_CONTENT_DETECTION\020\003\022\022\n" + "\016TEXT_DETECTION\020\007\022\023\n" + "\017OBJECT_TRACKING\020\t*r\n" + "\022LabelDetectionMode\022$\n" + " LABEL_DETECTION_MODE_UNSPECIFIED\020\000\022\r\n" + "\tSHOT_MODE\020\001\022\016\n\n" + "FRAME_MODE\020\002\022\027\n" + "\023SHOT_AND_FRAME_MODE\020\003*t\n\n" + "Likelihood\022\032\n" + "\026LIKELIHOOD_UNSPECIFIED\020\000\022\021\n\r" + "VERY_UNLIKELY\020\001\022\014\n" + "\010UNLIKELY\020\002\022\014\n" + "\010POSSIBLE\020\003\022\n\n" + 
"\006LIKELY\020\004\022\017\n" + "\013VERY_LIKELY\020\0052\316\002\n" + "\030VideoIntelligenceService\022\333\001\n\r" + "AnnotateVideo\022>.google.cloud.videointelligence.v1p2beta1.Annota" + "teVideoRequest\032\035.google.longrunning.Operation\"k\312A.\n" + "\025AnnotateVideoResponse\022\025Annot" + "ateVideoProgress\332A\022input_uri,features\202\323\344\223\002\037\"\032/v1p2beta1/videos:annotate:\001*\032T\312A" + " videointelligence.googleapis.com\322A.https:" + "//www.googleapis.com/auth/cloud-platformB\257\002\n" + ",com.google.cloud.videointelligence.v1p2beta1B\035VideoIntelligenceServiceProto" + "P\001ZZcloud.google.com/go/videointelligence/apiv1p2beta1/videointelligencepb;video" + "intelligencepb\252\002(Google.Cloud.VideoIntel" + "ligence.V1P2Beta1\312\002(Google\\Cloud\\VideoIn" + "telligence\\V1p2beta1\352\002+Google::Cloud::VideoIntelligence::V1p2beta1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), }); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor, new java.lang.String[] { "InputUri", "InputContent", "Features", "VideoContext", "OutputUri", "LocationId", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor = 
getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor, new java.lang.String[] { "Segments", "LabelDetectionConfig", "ShotChangeDetectionConfig", "ExplicitContentDetectionConfig", "TextDetectionConfig", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor, new java.lang.String[] { "LabelDetectionMode", "StationaryCamera", "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor, new java.lang.String[] { "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor, new java.lang.String[] { "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(5); 
internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor, new java.lang.String[] { "LanguageHints", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor, new java.lang.String[] { "StartTimeOffset", "EndTimeOffset", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor, new java.lang.String[] { "Segment", "Confidence", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor, new java.lang.String[] { "TimeOffset", "Confidence", }); internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_videointelligence_v1p2beta1_Entity_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor, new java.lang.String[] { "EntityId", "Description", "LanguageCode", }); 
internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor, new java.lang.String[] { "Entity", "CategoryEntities", "Segments", "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor, new java.lang.String[] { "TimeOffset", "PornographyLikelihood", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor, new java.lang.String[] { "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor, new java.lang.String[] { "Left", "Top", "Right", "Bottom", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor = getDescriptor().getMessageTypes().get(14); 
internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor, new java.lang.String[] { "InputUri", "SegmentLabelAnnotations", "ShotLabelAnnotations", "FrameLabelAnnotations", "ShotAnnotations", "ExplicitAnnotation", "TextAnnotations", "ObjectAnnotations", "Error", }); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor, new java.lang.String[] { "AnnotationResults", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor, new java.lang.String[] { "InputUri", "ProgressPercent", "StartTime", "UpdateTime", }); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor, new java.lang.String[] { "AnnotationProgress", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor = getDescriptor().getMessageTypes().get(18); 
internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor, new java.lang.String[] { "X", "Y", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor, new java.lang.String[] { "Vertices", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor, new java.lang.String[] { "Segment", "Confidence", "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor, new java.lang.String[] { "RotatedBoundingBox", "TimeOffset", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor, new java.lang.String[] { "Text", "Segments", }); 
internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor, new java.lang.String[] { "NormalizedBoundingBox", "TimeOffset", }); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor, new java.lang.String[] { "Segment", "TrackId", "Entity", "Confidence", "Frames", "TrackInfo", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); registry.add(com.google.longrunning.OperationsProto.operationInfo); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }